-rw-r--r--  corp/rih/frontend/Cargo.lock | 20
-rw-r--r--  corp/rih/frontend/Cargo.toml | 2
-rw-r--r--  corp/russian/predlozhnik/Cargo.lock | 20
-rw-r--r--  corp/russian/predlozhnik/Cargo.toml | 2
-rw-r--r--  corp/russian/predlozhnik/default.nix | 1
-rw-r--r--  docs/REVIEWS.md | 41
-rw-r--r--  fun/clbot/clbot.go | 24
-rw-r--r--  fun/clbot/clbot_test.go | 24
-rw-r--r--  ops/kontemplate/default.nix | 7
-rw-r--r--  ops/kontemplate/deps.nix | 111
-rw-r--r--  ops/kontemplate/go.mod | 25
-rw-r--r--  ops/kontemplate/go.sum | 75
-rw-r--r--  ops/kontemplate/main.go | 2
-rw-r--r--  ops/kontemplate/templater/templater.go | 2
-rw-r--r--  ops/machines/whitby/default.nix | 12
-rw-r--r--  ops/modules/clbot.nix | 11
-rw-r--r--  ops/modules/teleirc.nix | 40
-rw-r--r--  ops/secrets/secrets.nix | 7
-rw-r--r--  ops/secrets/teleirc.age | bin 0 -> 1006 bytes
-rw-r--r--  ops/terraform/deploy-nixos/README.md | 12
-rw-r--r--  ops/users/default.nix | 25
-rw-r--r--  third_party/exwm/exwm-core.el | 70
-rw-r--r--  third_party/exwm/exwm-floating.el | 10
-rw-r--r--  third_party/exwm/exwm-input.el | 2
-rw-r--r--  third_party/exwm/exwm-layout.el | 7
-rw-r--r--  third_party/exwm/exwm-systemtray.el | 11
-rw-r--r--  third_party/exwm/exwm-workspace.el | 10
-rw-r--r--  third_party/exwm/exwm-xsettings.el | 2
-rw-r--r--  third_party/exwm/exwm.el | 19
-rw-r--r--  third_party/geesefs/default.nix | 25
-rw-r--r--  third_party/nixpkgs/default.nix | 8
-rw-r--r--  third_party/overlays/patches/crate2nix-drop-darwin-explicit-dontstrip.patch | 22
-rw-r--r--  third_party/overlays/tvl.nix | 37
-rw-r--r--  third_party/sources/sources.json | 42
-rw-r--r--  third_party/teleirc/default.nix | 23
-rw-r--r--  tvix/Cargo.lock | 708
-rw-r--r--  tvix/Cargo.nix | 2821
-rw-r--r--  tvix/Cargo.toml | 6
-rw-r--r--  tvix/README.md | 8
-rw-r--r--  tvix/boot/README.md | 2
-rw-r--r--  tvix/build-go/build.pb.go | 2
-rw-r--r--  tvix/build-go/rpc_build.pb.go | 2
-rw-r--r--  tvix/build/Cargo.toml | 11
-rw-r--r--  tvix/build/default.nix | 12
-rw-r--r--  tvix/build/src/bin/tvix-build.rs | 36
-rw-r--r--  tvix/castore-go/castore.pb.go | 2
-rw-r--r--  tvix/castore-go/rpc_blobstore.pb.go | 2
-rw-r--r--  tvix/castore-go/rpc_directory.pb.go | 2
-rw-r--r--  tvix/castore/Cargo.toml | 16
-rw-r--r--  tvix/castore/default.nix | 33
-rw-r--r--  tvix/castore/src/blobservice/from_addr.rs | 8
-rw-r--r--  tvix/castore/src/blobservice/grpc.rs | 25
-rw-r--r--  tvix/castore/src/directoryservice/bigtable.rs | 2
-rw-r--r--  tvix/castore/src/directoryservice/closure_validator.rs | 268
-rw-r--r--  tvix/castore/src/directoryservice/combinators.rs | 142
-rw-r--r--  tvix/castore/src/directoryservice/directory_graph.rs | 413
-rw-r--r--  tvix/castore/src/directoryservice/from_addr.rs | 25
-rw-r--r--  tvix/castore/src/directoryservice/grpc.rs | 41
-rw-r--r--  tvix/castore/src/directoryservice/memory.rs | 2
-rw-r--r--  tvix/castore/src/directoryservice/mod.rs | 14
-rw-r--r--  tvix/castore/src/directoryservice/object_store.rs | 263
-rw-r--r--  tvix/castore/src/directoryservice/order_validator.rs | 181
-rw-r--r--  tvix/castore/src/directoryservice/simple_putter.rs | 14
-rw-r--r--  tvix/castore/src/directoryservice/sled.rs | 16
-rw-r--r--  tvix/castore/src/directoryservice/tests/mod.rs | 1
-rw-r--r--  tvix/castore/src/directoryservice/traverse.rs | 95
-rw-r--r--  tvix/castore/src/directoryservice/utils.rs | 91
-rw-r--r--  tvix/castore/src/fs/fuse/mod.rs (renamed from tvix/castore/src/fs/fuse.rs) | 59
-rw-r--r--  tvix/castore/src/fs/fuse/tests.rs (renamed from tvix/castore/src/fs/tests.rs) | 55
-rw-r--r--  tvix/castore/src/fs/mod.rs | 25
-rw-r--r--  tvix/castore/src/import/archive.rs | 109
-rw-r--r--  tvix/castore/src/import/blobs.rs | 177
-rw-r--r--  tvix/castore/src/import/fs.rs | 54
-rw-r--r--  tvix/castore/src/import/mod.rs | 2
-rw-r--r--  tvix/castore/src/proto/grpc_directoryservice_wrapper.rs | 100
-rw-r--r--  tvix/cli/Cargo.toml | 13
-rw-r--r--  tvix/cli/src/main.rs | 172
-rw-r--r--  tvix/cli/src/repl.rs | 175
-rw-r--r--  tvix/default.nix | 90
-rw-r--r--  tvix/docs/book.toml | 14
-rw-r--r--  tvix/docs/default.nix | 3
-rw-r--r--  tvix/docs/mdbook-admonish.css | 348
-rw-r--r--  tvix/docs/src/SUMMARY.md | 25
-rw-r--r--  tvix/docs/src/TODO.md | 129
-rw-r--r--  tvix/docs/src/architecture.md | 9
-rw-r--r--  tvix/docs/src/castore/blobstore-chunking.md (renamed from tvix/castore/docs/blobstore-chunking.md) | 6
-rw-r--r--  tvix/docs/src/castore/blobstore-protocol.md (renamed from tvix/castore/docs/blobstore-protocol.md) | 4
-rw-r--r--  tvix/docs/src/castore/data-model.md (renamed from tvix/castore/docs/data-model.md) | 8
-rw-r--r--  tvix/docs/src/castore/why-not-git-trees.md (renamed from tvix/castore/docs/why-not-git-trees.md) | 2
-rw-r--r--  tvix/docs/src/eval/abandoned/index.md | 3
-rw-r--r--  tvix/docs/src/eval/abandoned/thread-local-vm.md (renamed from tvix/eval/docs/abandoned/thread-local-vm.md) | 0
-rw-r--r--  tvix/docs/src/eval/bindings.md (renamed from tvix/eval/docs/bindings.md) | 6
-rw-r--r--  tvix/docs/src/eval/build-references.md (renamed from tvix/eval/docs/build-references.md) | 14
-rw-r--r--  tvix/docs/src/eval/builtins.md (renamed from tvix/eval/docs/builtins.md) | 0
-rw-r--r--  tvix/docs/src/eval/catchable-errors.md (renamed from tvix/eval/docs/catchable-errors.md) | 0
-rw-r--r--  tvix/docs/src/eval/known-optimisation-potential.md (renamed from tvix/eval/docs/known-optimisation-potential.md) | 0
-rw-r--r--  tvix/docs/src/eval/language-issues.md (renamed from tvix/eval/docs/language-issues.md) | 0
-rw-r--r--  tvix/docs/src/eval/opcodes-attrsets.md (renamed from tvix/eval/docs/opcodes-attrsets.md) | 0
-rw-r--r--  tvix/docs/src/eval/recursive-attrs.md (renamed from tvix/eval/docs/recursive-attrs.md) | 0
-rw-r--r--  tvix/docs/src/eval/vm-loop.md (renamed from tvix/eval/docs/vm-loop.md) | 0
-rw-r--r--  tvix/docs/src/language-spec.md | 4
-rw-r--r--  tvix/docs/src/nix-daemon/changelog.md | 202
-rw-r--r--  tvix/docs/src/nix-daemon/index.md | 15
-rw-r--r--  tvix/docs/src/nix-daemon/logging.md | 122
-rw-r--r--  tvix/docs/src/nix-daemon/operations.md | 894
-rw-r--r--  tvix/docs/src/nix-daemon/serialization.md | 305
-rw-r--r--  tvix/docs/src/store/api.md (renamed from tvix/store/docs/api.md) | 6
-rw-r--r--  tvix/docs/src/value-pointer-equality.md | 6
-rw-r--r--  tvix/eval/Cargo.toml | 4
-rw-r--r--  tvix/eval/benches/eval.rs | 6
-rw-r--r--  tvix/eval/build.rs | 7
-rw-r--r--  tvix/eval/default.nix | 13
-rw-r--r--  tvix/eval/src/builtins/mod.rs | 103
-rw-r--r--  tvix/eval/src/builtins/to_xml.rs | 275
-rw-r--r--  tvix/eval/src/errors.rs | 15
-rw-r--r--  tvix/eval/src/io.rs | 6
-rw-r--r--  tvix/eval/src/lib.rs | 2
-rw-r--r--  tvix/eval/src/nix_search_path.rs | 2
-rw-r--r--  tvix/eval/src/tests/mod.rs | 199
-rw-r--r--  tvix/eval/src/tests/nix_tests.rs | 207
-rw-r--r--  tvix/eval/src/tests/one_offs.rs | 2
-rw-r--r--  tvix/eval/src/tests/tvix_tests/eval-okay-builtins-split.exp | 1
-rw-r--r--  tvix/eval/src/tests/tvix_tests/eval-okay-builtins-split.nix | 10
-rw-r--r--  tvix/eval/src/tests/tvix_tests/eval-okay-toxml-empty.exp.xml | 5
-rw-r--r--  tvix/eval/src/tests/tvix_tests/eval-okay-toxml-empty.nix | 1
-rw-r--r--  tvix/eval/src/tests/tvix_tests/eval-okay-toxml.exp | 1
-rw-r--r--  tvix/eval/src/tests/tvix_tests/eval-okay-toxml.nix | 2
-rw-r--r--  tvix/eval/src/value/json.rs | 8
-rw-r--r--  tvix/eval/src/value/mod.rs | 4
-rw-r--r--  tvix/eval/src/value/string.rs | 74
-rw-r--r--  tvix/eval/src/vm/generators.rs | 2
-rw-r--r--  tvix/eval/src/vm/mod.rs | 4
-rw-r--r--  tvix/eval/tests/nix_oracle.rs | 2
-rw-r--r--  tvix/glue/Cargo.toml | 5
-rw-r--r--  tvix/glue/benches/eval.rs | 6
-rw-r--r--  tvix/glue/build.rs | 6
-rw-r--r--  tvix/glue/default.nix | 11
-rw-r--r--  tvix/glue/src/builtins/derivation.rs | 16
-rw-r--r--  tvix/glue/src/builtins/errors.rs | 6
-rw-r--r--  tvix/glue/src/builtins/fetchers.rs | 19
-rw-r--r--  tvix/glue/src/builtins/import.rs | 40
-rw-r--r--  tvix/glue/src/fetchers/decompression.rs | 6
-rw-r--r--  tvix/glue/src/fetchers/mod.rs | 408
-rw-r--r--  tvix/glue/src/fetchurl.rs | 82
-rw-r--r--  tvix/glue/src/known_paths.rs | 34
-rw-r--r--  tvix/glue/src/lib.rs | 2
-rw-r--r--  tvix/glue/src/tests/empty-file | 0
-rw-r--r--  tvix/glue/src/tests/tvix_tests/eval-okay-context-introspection.nix | 2
-rw-r--r--  tvix/glue/src/tests/tvix_tests/eval-okay-fetchurl.exp | 2
-rw-r--r--  tvix/glue/src/tests/tvix_tests/eval-okay-fetchurl.nix | 40
-rw-r--r--  tvix/glue/src/tests/tvix_tests/eval-okay-storePath.exp | 1
-rw-r--r--  tvix/glue/src/tests/tvix_tests/eval-okay-storePath.nix | 9
-rw-r--r--  tvix/glue/src/tests/tvix_tests/eval-okay-toxml-context.exp | 1
-rw-r--r--  tvix/glue/src/tests/tvix_tests/eval-okay-toxml-context.nix | 14
-rw-r--r--  tvix/glue/src/tvix_store_io.rs | 55
-rw-r--r--  tvix/nar-bridge-go/.gitignore (renamed from tvix/nar-bridge/.gitignore) | 0
-rw-r--r--  tvix/nar-bridge-go/README.md (renamed from tvix/nar-bridge/README.md) | 2
-rw-r--r--  tvix/nar-bridge-go/cmd/nar-bridge-http/main.go (renamed from tvix/nar-bridge/cmd/nar-bridge-http/main.go) | 4
-rw-r--r--  tvix/nar-bridge-go/cmd/nar-bridge-http/otel.go (renamed from tvix/nar-bridge/cmd/nar-bridge-http/otel.go) | 0
-rw-r--r--  tvix/nar-bridge-go/default.nix (renamed from tvix/nar-bridge/default.nix) | 2
-rw-r--r--  tvix/nar-bridge-go/go.mod (renamed from tvix/nar-bridge/go.mod) | 2
-rw-r--r--  tvix/nar-bridge-go/go.sum (renamed from tvix/nar-bridge/go.sum) | 0
-rw-r--r--  tvix/nar-bridge-go/pkg/http/nar_get.go (renamed from tvix/nar-bridge/pkg/http/nar_get.go) | 0
-rw-r--r--  tvix/nar-bridge-go/pkg/http/nar_put.go (renamed from tvix/nar-bridge/pkg/http/nar_put.go) | 2
-rw-r--r--  tvix/nar-bridge-go/pkg/http/narinfo.go (renamed from tvix/nar-bridge/pkg/http/narinfo.go) | 0
-rw-r--r--  tvix/nar-bridge-go/pkg/http/narinfo_get.go (renamed from tvix/nar-bridge/pkg/http/narinfo_get.go) | 0
-rw-r--r--  tvix/nar-bridge-go/pkg/http/narinfo_put.go (renamed from tvix/nar-bridge/pkg/http/narinfo_put.go) | 2
-rw-r--r--  tvix/nar-bridge-go/pkg/http/server.go (renamed from tvix/nar-bridge/pkg/http/server.go) | 0
-rw-r--r--  tvix/nar-bridge-go/pkg/http/util.go (renamed from tvix/nar-bridge/pkg/http/util.go) | 0
-rw-r--r--  tvix/nar-bridge-go/pkg/importer/blob_upload.go (renamed from tvix/nar-bridge/pkg/importer/blob_upload.go) | 0
-rw-r--r--  tvix/nar-bridge-go/pkg/importer/counting_writer.go (renamed from tvix/nar-bridge/pkg/importer/counting_writer.go) | 0
-rw-r--r--  tvix/nar-bridge-go/pkg/importer/directory_upload.go (renamed from tvix/nar-bridge/pkg/importer/directory_upload.go) | 0
-rw-r--r--  tvix/nar-bridge-go/pkg/importer/gen_pathinfo.go (renamed from tvix/nar-bridge/pkg/importer/gen_pathinfo.go) | 0
-rw-r--r--  tvix/nar-bridge-go/pkg/importer/importer.go (renamed from tvix/nar-bridge/pkg/importer/importer.go) | 0
-rw-r--r--  tvix/nar-bridge-go/pkg/importer/importer_test.go (renamed from tvix/nar-bridge/pkg/importer/importer_test.go) | 2
-rw-r--r--  tvix/nar-bridge-go/pkg/importer/roundtrip_test.go (renamed from tvix/nar-bridge/pkg/importer/roundtrip_test.go) | 2
-rw-r--r--  tvix/nar-bridge-go/pkg/importer/util_test.go (renamed from tvix/nar-bridge/pkg/importer/util_test.go) | 0
-rw-r--r--  tvix/nar-bridge-go/testdata/emptydirectory.nar (renamed from tvix/nar-bridge/testdata/emptydirectory.nar) | bin 96 -> 96 bytes
-rw-r--r--  tvix/nar-bridge-go/testdata/nar_1094wph9z4nwlgvsd53abfz8i117ykiv5dwnq9nnhz846s7xqd7d.nar (renamed from tvix/nar-bridge/testdata/nar_1094wph9z4nwlgvsd53abfz8i117ykiv5dwnq9nnhz846s7xqd7d.nar) | bin 464152 -> 464152 bytes
-rw-r--r--  tvix/nar-bridge-go/testdata/onebyteexecutable.nar (renamed from tvix/nar-bridge/testdata/onebyteexecutable.nar) | bin 152 -> 152 bytes
-rw-r--r--  tvix/nar-bridge-go/testdata/onebyteregular.nar (renamed from tvix/nar-bridge/testdata/onebyteregular.nar) | bin 120 -> 120 bytes
-rw-r--r--  tvix/nar-bridge-go/testdata/popdirectories.nar (renamed from tvix/nar-bridge/testdata/popdirectories.nar) | bin 600 -> 600 bytes
-rw-r--r--  tvix/nar-bridge-go/testdata/symlink.nar (renamed from tvix/nar-bridge/testdata/symlink.nar) | bin 136 -> 136 bytes
-rw-r--r--  tvix/nix-compat/Cargo.toml | 2
-rw-r--r--  tvix/nix-compat/build.rs | 5
-rw-r--r--  tvix/nix-compat/default.nix | 14
-rw-r--r--  tvix/nix-compat/src/nar/reader/mod.rs | 2
-rw-r--r--  tvix/nix-compat/src/nar/wire/mod.rs | 2
-rw-r--r--  tvix/nix-compat/src/wire/bytes/reader/mod.rs | 2
-rw-r--r--  tvix/shell.nix | 16
-rw-r--r--  tvix/store-go/pathinfo.pb.go | 2
-rw-r--r--  tvix/store-go/rpc_pathinfo.pb.go | 2
-rw-r--r--  tvix/store/Cargo.toml | 18
-rw-r--r--  tvix/store/default.nix | 44
-rw-r--r--  tvix/store/src/bin/tvix-store.rs | 189
-rw-r--r--  tvix/store/src/import.rs | 9
-rw-r--r--  tvix/store/src/nar/import.rs | 75
-rw-r--r--  tvix/store/src/nar/mod.rs | 1
-rw-r--r--  tvix/store/src/nar/renderer.rs | 7
-rw-r--r--  tvix/store/src/pathinfoservice/bigtable.rs | 21
-rw-r--r--  tvix/store/src/pathinfoservice/combinators.rs | 111
-rw-r--r--  tvix/store/src/pathinfoservice/from_addr.rs | 9
-rw-r--r--  tvix/store/src/pathinfoservice/grpc.rs | 40
-rw-r--r--  tvix/store/src/pathinfoservice/lru.rs | 11
-rw-r--r--  tvix/store/src/pathinfoservice/memory.rs | 4
-rw-r--r--  tvix/store/src/pathinfoservice/mod.rs | 2
-rw-r--r--  tvix/store/src/pathinfoservice/nix_http.rs | 30
-rw-r--r--  tvix/store/src/pathinfoservice/sled.rs | 7
-rw-r--r--  tvix/store/src/pathinfoservice/tests/mod.rs | 77
-rw-r--r--  tvix/store/src/pathinfoservice/tests/utils.rs | 38
-rw-r--r--  tvix/store/src/utils.rs | 33
-rw-r--r--  tvix/tracing/Cargo.toml | 43
-rw-r--r--  tvix/tracing/default.nix | 11
-rw-r--r--  tvix/tracing/src/lib.rs | 302
-rw-r--r--  tvix/tracing/src/propagate/mod.rs | 9
-rw-r--r--  tvix/tracing/src/propagate/tonic.rs | 59
-rw-r--r--  tvix/utils.nix | 42
-rw-r--r--  tvix/website/landing-en.md | 2
-rw-r--r--  users/Profpatsch/.hlint.yaml | 2
-rw-r--r--  users/Profpatsch/my-prelude/default.nix | 1
-rw-r--r--  users/Profpatsch/my-prelude/my-prelude.cabal | 1
-rw-r--r--  users/Profpatsch/my-prelude/src/Arg.hs | 34
-rw-r--r--  users/Profpatsch/my-prelude/src/Postgres/MonadPostgres.hs | 258
-rw-r--r--  users/Profpatsch/openlab-tools/src/OpenlabTools.hs | 56
-rw-r--r--  users/Profpatsch/shell.nix | 2
-rw-r--r--  users/Profpatsch/whatcd-resolver/src/AppT.hs | 5
-rw-r--r--  users/Profpatsch/whatcd-resolver/src/Http.hs | 34
-rw-r--r--  users/Profpatsch/whatcd-resolver/src/JsonLd.hs | 1
-rw-r--r--  users/Profpatsch/whatcd-resolver/src/Redacted.hs | 91
-rw-r--r--  users/Profpatsch/whatcd-resolver/src/Transmission.hs | 31
-rw-r--r--  users/Profpatsch/whatcd-resolver/src/WhatcdResolver.hs | 354
-rw-r--r--  users/Profpatsch/whatcd-resolver/whatcd-resolver.cabal | 4
-rw-r--r--  users/flokli/archeology/default.nix | 8
-rw-r--r--  users/flokli/ipu6-softisp/default.nix | 12
-rw-r--r--  users/tazjin/chase-geese/default.nix | 2
-rw-r--r--  users/tazjin/emacs/config/settings.el | 3
-rw-r--r--  users/tazjin/emacs/default.nix | 2
-rw-r--r--  users/tazjin/home/arbat.nix | 11
-rw-r--r--  users/tazjin/keys/default.nix | 1
-rw-r--r--  users/tazjin/nixos/arbat/default.nix | 74
-rw-r--r--  users/tazjin/nixos/default.nix | 1
-rw-r--r--  users/tazjin/nixos/modules/desktop.nix | 4
-rw-r--r--  users/tazjin/nixos/modules/geesefs.nix | 2
-rw-r--r--  users/tazjin/nixos/modules/home-config.nix | 4
-rw-r--r--  users/tazjin/nixos/modules/physical.nix | 3
-rw-r--r--  users/tazjin/nixos/zamalek/default.nix | 5
-rw-r--r--  web/pwcrypt/Cargo.lock | 20
-rw-r--r--  web/pwcrypt/Cargo.toml | 2
-rw-r--r--  web/tvixbolt/Cargo.lock | 27
-rw-r--r--  web/tvixbolt/Cargo.toml | 2
250 files changed, 9615 insertions, 3592 deletions
diff --git a/corp/rih/frontend/Cargo.lock b/corp/rih/frontend/Cargo.lock
index 2d2f5ea84b..40641d5de3 100644
--- a/corp/rih/frontend/Cargo.lock
+++ b/corp/rih/frontend/Cargo.lock
@@ -1500,9 +1500,9 @@ checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
 
 [[package]]
 name = "wasm-bindgen"
-version = "0.2.91"
+version = "0.2.92"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c1e124130aee3fb58c5bdd6b639a0509486b0338acaaae0c84a5124b0f588b7f"
+checksum = "4be2531df63900aeb2bca0daaaddec08491ee64ceecbee5076636a3b026795a8"
 dependencies = [
  "cfg-if",
  "wasm-bindgen-macro",
@@ -1510,9 +1510,9 @@ dependencies = [
 
 [[package]]
 name = "wasm-bindgen-backend"
-version = "0.2.91"
+version = "0.2.92"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c9e7e1900c352b609c8488ad12639a311045f40a35491fb69ba8c12f758af70b"
+checksum = "614d787b966d3989fa7bb98a654e369c762374fd3213d212cfc0251257e747da"
 dependencies = [
  "bumpalo",
  "log",
@@ -1537,9 +1537,9 @@ dependencies = [
 
 [[package]]
 name = "wasm-bindgen-macro"
-version = "0.2.91"
+version = "0.2.92"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b30af9e2d358182b5c7449424f017eba305ed32a7010509ede96cdc4696c46ed"
+checksum = "a1f8823de937b71b9460c0c34e25f3da88250760bec0ebac694b49997550d726"
 dependencies = [
  "quote",
  "wasm-bindgen-macro-support",
@@ -1547,9 +1547,9 @@ dependencies = [
 
 [[package]]
 name = "wasm-bindgen-macro-support"
-version = "0.2.91"
+version = "0.2.92"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "642f325be6301eb8107a83d12a8ac6c1e1c54345a7ef1a9261962dfefda09e66"
+checksum = "e94f17b526d0a461a191c78ea52bbce64071ed5c04c9ffe424dcb38f74171bb7"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -1560,9 +1560,9 @@ dependencies = [
 
 [[package]]
 name = "wasm-bindgen-shared"
-version = "0.2.91"
+version = "0.2.92"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4f186bd2dcf04330886ce82d6f33dd75a7bfcf69ecf5763b89fcde53b6ac9838"
+checksum = "af190c94f2773fdb3729c55b007a722abb5384da03bc0986df4c289bf5567e96"
 
 [[package]]
 name = "web-sys"
diff --git a/corp/rih/frontend/Cargo.toml b/corp/rih/frontend/Cargo.toml
index f01378c7e6..1b9d500979 100644
--- a/corp/rih/frontend/Cargo.toml
+++ b/corp/rih/frontend/Cargo.toml
@@ -19,7 +19,7 @@ yew-router = "0.17"
 wasm-bindgen-futures = "0.4"
 
 # needs to be in sync with nixpkgs
-wasm-bindgen = "= 0.2.91"
+wasm-bindgen = "= 0.2.92"
 uuid = { version = "1.3.3", features = ["v4", "serde"] }
 
 [dependencies.serde]
diff --git a/corp/russian/predlozhnik/Cargo.lock b/corp/russian/predlozhnik/Cargo.lock
index 1cacb43b39..6874e3554c 100644
--- a/corp/russian/predlozhnik/Cargo.lock
+++ b/corp/russian/predlozhnik/Cargo.lock
@@ -363,9 +363,9 @@ checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
 
 [[package]]
 name = "wasm-bindgen"
-version = "0.2.91"
+version = "0.2.92"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c1e124130aee3fb58c5bdd6b639a0509486b0338acaaae0c84a5124b0f588b7f"
+checksum = "4be2531df63900aeb2bca0daaaddec08491ee64ceecbee5076636a3b026795a8"
 dependencies = [
  "cfg-if",
  "wasm-bindgen-macro",
@@ -373,9 +373,9 @@ dependencies = [
 
 [[package]]
 name = "wasm-bindgen-backend"
-version = "0.2.91"
+version = "0.2.92"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c9e7e1900c352b609c8488ad12639a311045f40a35491fb69ba8c12f758af70b"
+checksum = "614d787b966d3989fa7bb98a654e369c762374fd3213d212cfc0251257e747da"
 dependencies = [
  "bumpalo",
  "log",
@@ -400,9 +400,9 @@ dependencies = [
 
 [[package]]
 name = "wasm-bindgen-macro"
-version = "0.2.91"
+version = "0.2.92"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b30af9e2d358182b5c7449424f017eba305ed32a7010509ede96cdc4696c46ed"
+checksum = "a1f8823de937b71b9460c0c34e25f3da88250760bec0ebac694b49997550d726"
 dependencies = [
  "quote",
  "wasm-bindgen-macro-support",
@@ -410,9 +410,9 @@ dependencies = [
 
 [[package]]
 name = "wasm-bindgen-macro-support"
-version = "0.2.91"
+version = "0.2.92"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "642f325be6301eb8107a83d12a8ac6c1e1c54345a7ef1a9261962dfefda09e66"
+checksum = "e94f17b526d0a461a191c78ea52bbce64071ed5c04c9ffe424dcb38f74171bb7"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -423,9 +423,9 @@ dependencies = [
 
 [[package]]
 name = "wasm-bindgen-shared"
-version = "0.2.91"
+version = "0.2.92"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4f186bd2dcf04330886ce82d6f33dd75a7bfcf69ecf5763b89fcde53b6ac9838"
+checksum = "af190c94f2773fdb3729c55b007a722abb5384da03bc0986df4c289bf5567e96"
 
 [[package]]
 name = "web-sys"
diff --git a/corp/russian/predlozhnik/Cargo.toml b/corp/russian/predlozhnik/Cargo.toml
index 87537b560b..0b099ac927 100644
--- a/corp/russian/predlozhnik/Cargo.toml
+++ b/corp/russian/predlozhnik/Cargo.toml
@@ -9,4 +9,4 @@ lazy_static = "1.4"
 yew = "0.19"
 
 # needs to be in sync with nixpkgs
-wasm-bindgen = "= 0.2.91"
+wasm-bindgen = "= 0.2.92"
diff --git a/corp/russian/predlozhnik/default.nix b/corp/russian/predlozhnik/default.nix
index 2137be1112..495e977c13 100644
--- a/corp/russian/predlozhnik/default.nix
+++ b/corp/russian/predlozhnik/default.nix
@@ -44,7 +44,6 @@ pkgs.rustPlatform.buildRustPackage rec {
     export PATH=${lib.makeBinPath deps}:$PATH
     mkdir home
     export HOME=$PWD/.home
-    env
     trunk build --release -d $out
   '';
 
diff --git a/docs/REVIEWS.md b/docs/REVIEWS.md
index d76f11f410..9d2356744d 100644
--- a/docs/REVIEWS.md
+++ b/docs/REVIEWS.md
@@ -5,10 +5,10 @@ TVL Code Reviews
 **Table of Contents**
 
 - [TVL Code Reviews](#tvl-code-reviews)
+    - [Registration](#registration)
     - [Gerrit setup](#gerrit-setup)
     - [Gerrit workflows](#gerrit-workflows)
     - [Review process & approvals](#review-process--approvals)
-    - [Registration](#registration)
     - [Submitting changes via email](#submitting-changes-via-email)
     - [Gerrit for Github users](#gerrit-for-github-users)
 
@@ -22,6 +22,28 @@ contribution guidelines](./CONTRIBUTING.md).
 All changes are tracked at [cl.tvl.fyi](https://cl.tvl.fyi) using Gerrit. See
 [Registration](#registration) for information on how to register an account.
 
+## Registration
+
+The preferred method of contribution & review is via Gerrit.
+
+TVL’s Gerrit supports single sign-on (SSO) using a GitHub, StackOverflow or
+GitLab.com account.
+
+Additionally, if you prefer not to use an SSO option, or wish to have a
+backup authentication strategy in the event of a downed server or a Keycloak
+issue (recommended), you can create a TVL-specific LDAP account on the Gerrit
+instance by following these instructions:
+
+1. Be a member of `#tvl` on [hackint][].
+2. Clone the depot locally (via `git clone "https://cl.tvl.fyi/depot"`).
+3. Create a user entry in our LDAP server in [ops/users][ops-users].
+
+   The entry can be generated using [//web/pwcrypt](https://signup.tvl.fyi/).
+4. Create a commit adding yourself (see e.g.
+   [CL/2671](https://cl.tvl.fyi/c/depot/+/2671))
+5. If only using LDAP, submit the patch via email (see
+   [Submitting changes via email](#submitting-changes-via-email))
+
 ## Gerrit setup
 
 Gerrit uses the concept of change IDs to track commits across rebases and other
@@ -111,23 +133,8 @@ formalised checks before submitting:
 If all these conditions are fulfilled, the **change author submits their change
 themselves**.
 
-## Registration
-
-You may log into Gerrit using a GitHub, StackOverflow or GitLab.com account.
-
-If you would like to have a TVL-specific account on the Gerrit
-instance, follow these instructions:
-
-1. Be a member of `#tvl` on [hackint][].
-2. Clone the depot locally (via `git clone "https://cl.tvl.fyi/depot"`).
-3. Create a user entry in our LDAP server in [ops/users][ops-users].
-
-   The entry can be generated using [//web/pwcrypt](https://signup.tvl.fyi/).
-4. Create a commit adding yourself (see e.g.
-   [CL/2671](https://cl.tvl.fyi/c/depot/+/2671))
-5. Submit the commit via email (see below).
-
 ## Submitting changes via email
+
 Please keep in mind this process is more complicated and requires more work from
 both sides:
 
diff --git a/fun/clbot/clbot.go b/fun/clbot/clbot.go
index e5a5990ef2..40b044f45d 100644
--- a/fun/clbot/clbot.go
+++ b/fun/clbot/clbot.go
@@ -41,7 +41,8 @@ var (
 	notifyRepo     = flag.String("notify_repo", "depot", "Repo name to notify about")
 	notifyBranches = stringSetFlag{}
 
-	neverPing = flag.String("never_ping", "marcus", "Comma-separated terms that should never ping users")
+	neverPing   = flag.String("never_ping", "marcus", "Comma-separated terms that should never ping users")
+	onlyDisplay = flag.String("only_display", "", "Comma-separated substrings of the Gerrit CL change subject that should be shown (everything else is dropped)")
 )
 
 func init() {
@@ -51,7 +52,7 @@ func init() {
 type stringSetFlag map[string]bool
 
 func (f stringSetFlag) String() string {
-	return fmt.Sprintf("%q", map[string]bool(f))
+	return fmt.Sprintf("%v", map[string]bool(f))
 }
 func (f stringSetFlag) Set(s string) error {
 	if s == "" {
@@ -193,6 +194,21 @@ func nopingAll(username, message string) string {
 	return strings.ReplaceAll(message, username, noping(username))
 }
 
+// changeShouldBeSkipped returns true if the CL should not be displayed, i.e.
+// `onlyDisplay` is set and none of its comma-separated needles match the subject.
+func changeShouldBeSkipped(onlyDisplay string, changeSubject string) bool {
+	// case when we don’t want to filter
+	if onlyDisplay == "" {
+		return false
+	}
+	for _, needle := range strings.Split(onlyDisplay, ",") {
+		if strings.Contains(changeSubject, needle) {
+			return false
+		}
+	}
+	return true
+}
+
 func patchSetURL(c gerritevents.Change, p gerritevents.PatchSet) string {
 	return fmt.Sprintf("https://cl.tvl.fyi/%d", c.Number)
 }
@@ -248,13 +264,13 @@ func main() {
 			var parsedMsg string
 			switch e := e.(type) {
 			case *gerritevents.PatchSetCreated:
-				if e.Change.Project != *notifyRepo || !notifyBranches[e.Change.Branch] || e.PatchSet.Number != 1 {
+				if e.Change.Project != *notifyRepo || !notifyBranches[e.Change.Branch] || e.PatchSet.Number != 1 || changeShouldBeSkipped(*onlyDisplay, e.Change.Subject) {
 					continue
 				}
 				user := username(e.PatchSet.Uploader)
 				parsedMsg = nopingAll(user, fmt.Sprintf("CL/%d proposed by %s - %s - %s", e.Change.Number, user, e.Change.Subject, patchSetURL(e.Change, e.PatchSet)))
 			case *gerritevents.ChangeMerged:
-				if e.Change.Project != *notifyRepo || !notifyBranches[e.Change.Branch] {
+				if e.Change.Project != *notifyRepo || !notifyBranches[e.Change.Branch] || changeShouldBeSkipped(*onlyDisplay, e.Change.Subject) {
 					continue
 				}
 				owner := username(e.Change.Owner)
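The filtering logic above is small enough to read in isolation. Here is a minimal standalone sketch (the function body is taken verbatim from the diff; the `main` driver and the sample subjects are illustrative assumptions): an empty `only_display` shows every CL, otherwise a CL is shown only if its subject contains at least one of the comma-separated needles.

```go
package main

import (
	"fmt"
	"strings"
)

// changeShouldBeSkipped is copied from the clbot.go change above.
func changeShouldBeSkipped(onlyDisplay string, changeSubject string) bool {
	// case when we don’t want to filter
	if onlyDisplay == "" {
		return false
	}
	for _, needle := range strings.Split(onlyDisplay, ",") {
		if strings.Contains(changeSubject, needle) {
			return false
		}
	}
	return true
}

func main() {
	// Filter as configured for #tvix-dev in the whitby change further below.
	filter := "tvix,nix-compat,third_party,third-party,3p"
	fmt.Println(changeShouldBeSkipped(filter, "tvix(castore): refactor fuse tests")) // false: shown
	fmt.Println(changeShouldBeSkipped(filter, "ops/whitby: enable teleirc"))         // true: skipped
}
```

The new test file below exercises the same semantics.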
diff --git a/fun/clbot/clbot_test.go b/fun/clbot/clbot_test.go
new file mode 100644
index 0000000000..567540c364
--- /dev/null
+++ b/fun/clbot/clbot_test.go
@@ -0,0 +1,24 @@
+package main
+
+import (
+	"testing"
+)
+
+func TestChangeShouldBeSkipped(t *testing.T) {
+	dontSkipAny := ""
+	if changeShouldBeSkipped(dontSkipAny, "mysubject") {
+		t.Fatal("dontSkipAny should not not be skip any")
+	}
+
+	showThese := "A,B"
+	if changeShouldBeSkipped(showThese, "A") {
+		t.Fatal("A should be shown")
+	}
+	if changeShouldBeSkipped(showThese, "B") {
+		t.Fatal("B should be shown")
+	}
+	if !changeShouldBeSkipped(showThese, "C") {
+		t.Fatal("C should not be shown")
+	}
+
+}
diff --git a/ops/kontemplate/default.nix b/ops/kontemplate/default.nix
index 1190869c3f..be01f9e4f6 100644
--- a/ops/kontemplate/default.nix
+++ b/ops/kontemplate/default.nix
@@ -12,12 +12,11 @@
 
 { lib, pkgs, ... }:
 
-pkgs.buildGoPackage rec {
+pkgs.buildGoModule rec {
   name = "kontemplate-${version}";
   version = "canon";
   src = ./.;
-  goPackagePath = "github.com/tazjin/kontemplate";
-  goDeps = ./deps.nix;
+
   buildInputs = [ pkgs.parallel ];
 
   # Enable checks and configure check-phase to include vet:
@@ -28,6 +27,8 @@ pkgs.buildGoPackage rec {
     done
   '';
 
+  vendorHash = "sha256-xPGVM2dq5fAVOiuodOXhDm3v3k+ncNLhlk6aCtF5S9E=";
+
   meta = with lib; {
     description = "A resource templating helper for Kubernetes";
     homepage = "http://kontemplate.works/";
diff --git a/ops/kontemplate/deps.nix b/ops/kontemplate/deps.nix
deleted file mode 100644
index 7693968bd5..0000000000
--- a/ops/kontemplate/deps.nix
+++ /dev/null
@@ -1,111 +0,0 @@
-# This file was generated by https://github.com/kamilchm/go2nix v1.3.0
-[
-  {
-    goPackagePath = "github.com/Masterminds/goutils";
-    fetch = {
-      type = "git";
-      url = "https://github.com/Masterminds/goutils";
-      rev = "41ac8693c5c10a92ea1ff5ac3a7f95646f6123b0";
-      sha256 = "180px47gj936qyk5bkv5mbbgiil9abdjq6kwkf7sq70vyi9mcfiq";
-    };
-  }
-  {
-    goPackagePath = "github.com/Masterminds/semver";
-    fetch = {
-      type = "git";
-      url = "https://github.com/Masterminds/semver";
-      rev = "5bc3b9184d48f1412b300b87a200cf020d9254cf";
-      sha256 = "1vdfm653v50jf63cw0kg2hslx50cn4mk6lj3p51bi11jrg48kfng";
-    };
-  }
-  {
-    goPackagePath = "github.com/Masterminds/sprig";
-    fetch = {
-      type = "git";
-      url = "https://github.com/Masterminds/sprig";
-      rev = "6f509977777c33eae63b2136d97f7b976cb971cc";
-      sha256 = "05h9k6fhjxnpwlihj3z02q9kvqvnq53jix0ab84sx0666bci3cdh";
-    };
-  }
-  {
-    goPackagePath = "github.com/alecthomas/template";
-    fetch = {
-      type = "git";
-      url = "https://github.com/alecthomas/template";
-      rev = "fb15b899a75114aa79cc930e33c46b577cc664b1";
-      sha256 = "1vlasv4dgycydh5wx6jdcvz40zdv90zz1h7836z7lhsi2ymvii26";
-    };
-  }
-  {
-    goPackagePath = "github.com/alecthomas/units";
-    fetch = {
-      type = "git";
-      url = "https://github.com/alecthomas/units";
-      rev = "c3de453c63f4bdb4dadffab9805ec00426c505f7";
-      sha256 = "0js37zlgv37y61j4a2d46jh72xm5kxmpaiw0ya9v944bjpc386my";
-    };
-  }
-  {
-    goPackagePath = "github.com/ghodss/yaml";
-    fetch = {
-      type = "git";
-      url = "https://github.com/ghodss/yaml";
-      rev = "25d852aebe32c875e9c044af3eef9c7dc6bc777f";
-      sha256 = "1w9yq0bxzygc4qwkwwiy7k1k1yviaspcqqv18255k2xkjv5ipccz";
-    };
-  }
-  {
-    goPackagePath = "github.com/google/uuid";
-    fetch = {
-      type = "git";
-      url = "https://github.com/google/uuid";
-      rev = "c2e93f3ae59f2904160ceaab466009f965df46d6";
-      sha256 = "0zw8fvl6jqg0fmv6kmvhss0g4gkrbvgyvl2zgy5wdbdlgp4fja0h";
-    };
-  }
-  {
-    goPackagePath = "github.com/huandu/xstrings";
-    fetch = {
-      type = "git";
-      url = "https://github.com/huandu/xstrings";
-      rev = "8bbcf2f9ccb55755e748b7644164cd4bdce94c1d";
-      sha256 = "1ivvc95514z63k7cpz71l0dwlanffmsh1pijhaqmp41kfiby8rsx";
-    };
-  }
-  {
-    goPackagePath = "github.com/imdario/mergo";
-    fetch = {
-      type = "git";
-      url = "https://github.com/imdario/mergo";
-      rev = "4c317f2286be3bd0c4f1a0e622edc6398ec4656d";
-      sha256 = "0bihha1qsgfjk14yv1hwddv3d8dzxpbjlaxwwyys6lhgxz1cr9h9";
-    };
-  }
-  {
-    goPackagePath = "golang.org/x/crypto";
-    fetch = {
-      type = "git";
-      url = "https://go.googlesource.com/crypto";
-      rev = "9756ffdc24725223350eb3266ffb92590d28f278";
-      sha256 = "0q7hxaaq6lp0v8qqzifvysl47z5rfdlrxkh3d29vsl3wyby3dxl8";
-    };
-  }
-  {
-    goPackagePath = "gopkg.in/alecthomas/kingpin.v2";
-    fetch = {
-      type = "git";
-      url = "https://gopkg.in/alecthomas/kingpin.v2";
-      rev = "947dcec5ba9c011838740e680966fd7087a71d0d";
-      sha256 = "0mndnv3hdngr3bxp7yxfd47cas4prv98sqw534mx7vp38gd88n5r";
-    };
-  }
-  {
-    goPackagePath = "gopkg.in/yaml.v2";
-    fetch = {
-      type = "git";
-      url = "https://gopkg.in/yaml.v2";
-      rev = "51d6538a90f86fe93ac480b35f37b2be17fef232";
-      sha256 = "01wj12jzsdqlnidpyjssmj0r4yavlqy7dwrg7adqd8dicjc4ncsa";
-    };
-  }
-]
diff --git a/ops/kontemplate/go.mod b/ops/kontemplate/go.mod
new file mode 100644
index 0000000000..e3ae158ea2
--- /dev/null
+++ b/ops/kontemplate/go.mod
@@ -0,0 +1,25 @@
+module github.com/tazjin/kontemplate
+
+go 1.22.3
+
+require (
+	github.com/Masterminds/sprig/v3 v3.2.3
+	github.com/alecthomas/kingpin/v2 v2.4.0
+	github.com/ghodss/yaml v1.0.0
+)
+
+require (
+	github.com/Masterminds/goutils v1.1.1 // indirect
+	github.com/Masterminds/semver/v3 v3.2.0 // indirect
+	github.com/alecthomas/units v0.0.0-20211218093645-b94a6e3cc137 // indirect
+	github.com/google/uuid v1.1.1 // indirect
+	github.com/huandu/xstrings v1.3.3 // indirect
+	github.com/imdario/mergo v0.3.11 // indirect
+	github.com/mitchellh/copystructure v1.0.0 // indirect
+	github.com/mitchellh/reflectwalk v1.0.0 // indirect
+	github.com/shopspring/decimal v1.2.0 // indirect
+	github.com/spf13/cast v1.3.1 // indirect
+	github.com/xhit/go-str2duration/v2 v2.1.0 // indirect
+	golang.org/x/crypto v0.3.0 // indirect
+	gopkg.in/yaml.v2 v2.3.0 // indirect
+)
diff --git a/ops/kontemplate/go.sum b/ops/kontemplate/go.sum
new file mode 100644
index 0000000000..754cffbcb8
--- /dev/null
+++ b/ops/kontemplate/go.sum
@@ -0,0 +1,75 @@
+github.com/Masterminds/goutils v1.1.1 h1:5nUrii3FMTL5diU80unEVvNevw1nH4+ZV4DSLVJLSYI=
+github.com/Masterminds/goutils v1.1.1/go.mod h1:8cTjp+g8YejhMuvIA5y2vz3BpJxksy863GQaJW2MFNU=
+github.com/Masterminds/semver/v3 v3.2.0 h1:3MEsd0SM6jqZojhjLWWeBY+Kcjy9i6MQAeY7YgDP83g=
+github.com/Masterminds/semver/v3 v3.2.0/go.mod h1:qvl/7zhW3nngYb5+80sSMF+FG2BjYrf8m9wsX0PNOMQ=
+github.com/Masterminds/sprig/v3 v3.2.3 h1:eL2fZNezLomi0uOLqjQoN6BfsDD+fyLtgbJMAj9n6YA=
+github.com/Masterminds/sprig/v3 v3.2.3/go.mod h1:rXcFaZ2zZbLRJv/xSysmlgIM1u11eBaRMhvYXJNkGuM=
+github.com/alecthomas/kingpin/v2 v2.4.0 h1:f48lwail6p8zpO1bC4TxtqACaGqHYA22qkHjHpqDjYY=
+github.com/alecthomas/kingpin/v2 v2.4.0/go.mod h1:0gyi0zQnjuFk8xrkNKamJoyUo382HRL7ATRpFZCw6tE=
+github.com/alecthomas/units v0.0.0-20211218093645-b94a6e3cc137 h1:s6gZFSlWYmbqAuRjVTiNNhvNRfY2Wxp9nhfyel4rklc=
+github.com/alecthomas/units v0.0.0-20211218093645-b94a6e3cc137/go.mod h1:OMCwj8VM1Kc9e19TLln2VL61YJF0x1XFtfdL4JdbSyE=
+github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
+github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/ghodss/yaml v1.0.0 h1:wQHKEahhL6wmXdzwWG11gIVCkOv05bNOh+Rxn0yngAk=
+github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04=
+github.com/google/uuid v1.1.1 h1:Gkbcsh/GbpXz7lPftLA3P6TYMwjCLYm83jiFQZF/3gY=
+github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
+github.com/huandu/xstrings v1.3.3 h1:/Gcsuc1x8JVbJ9/rlye4xZnVAbEkGauT8lbebqcQws4=
+github.com/huandu/xstrings v1.3.3/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE=
+github.com/imdario/mergo v0.3.11 h1:3tnifQM4i+fbajXKBHXWEH+KvNHqojZ778UH75j3bGA=
+github.com/imdario/mergo v0.3.11/go.mod h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH0dnCYA=
+github.com/mitchellh/copystructure v1.0.0 h1:Laisrj+bAB6b/yJwB5Bt3ITZhGJdqmxquMKeZ+mmkFQ=
+github.com/mitchellh/copystructure v1.0.0/go.mod h1:SNtv71yrdKgLRyLFxmLdkAbkKEFWgYaq1OVrnRcwhnw=
+github.com/mitchellh/reflectwalk v1.0.0 h1:9D+8oIskB4VJBN5SFlmc27fSlIBZaov1Wpk/IfikLNY=
+github.com/mitchellh/reflectwalk v1.0.0/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw=
+github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
+github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
+github.com/shopspring/decimal v1.2.0 h1:abSATXmQEYyShuxI4/vyW3tV1MrKAJzCZ/0zLUXYbsQ=
+github.com/shopspring/decimal v1.2.0/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o=
+github.com/spf13/cast v1.3.1 h1:nFm6S0SMdyzrzcmThSipiEubIDy8WEXKNZ0UOgiRpng=
+github.com/spf13/cast v1.3.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE=
+github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
+github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
+github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
+github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA=
+github.com/stretchr/testify v1.8.2 h1:+h33VjcLVPDHtOdpUCuF+7gSuG3yGIftsP1YvFihtJ8=
+github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
+github.com/xhit/go-str2duration/v2 v2.1.0 h1:lxklc02Drh6ynqX+DdPyp5pCKLUQpRT8bp8Ydu2Bstc=
+github.com/xhit/go-str2duration/v2 v2.1.0/go.mod h1:ohY8p+0f07DiV6Em5LKB0s2YpLtXVyJfNt1+BlmyAsU=
+github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
+golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
+golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
+golang.org/x/crypto v0.3.0 h1:a06MkbcxBrEFc0w0QIZWXrH/9cCX6KJyWbBOIwAn+7A=
+golang.org/x/crypto v0.3.0/go.mod h1:hebNnKkNXi2UzZN1eVRvBB7co0a+JxK6XbPiWVs/3J4=
+golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
+golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
+golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
+golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
+golang.org/x/net v0.2.0/go.mod h1:KqCZLdyyvdV855qA2rE3GC2aiw5xGR5TEjj8smXukLY=
+golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
+golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.2.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
+golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
+golang.org/x/term v0.2.0/go.mod h1:TVmDHMZPmdnySmBfhjOoOdhjzdE1h4u1VwSiw2l1Nuc=
+golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
+golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
+golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
+golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
+golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
+golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
+golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
+golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
+gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
+gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
+gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
+gopkg.in/yaml.v2 v2.3.0 h1:clyUAQHOM3G0M3f5vQj7LuJrETvjVot3Z5el9nffUtU=
+gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
+gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
+gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
diff --git a/ops/kontemplate/main.go b/ops/kontemplate/main.go
index e55d42465c..c98229f5cd 100644
--- a/ops/kontemplate/main.go
+++ b/ops/kontemplate/main.go
@@ -21,9 +21,9 @@ import (
 	"os/exec"
 	"strings"
 
+	"github.com/alecthomas/kingpin/v2"
 	"github.com/tazjin/kontemplate/context"
 	"github.com/tazjin/kontemplate/templater"
-	"gopkg.in/alecthomas/kingpin.v2"
 )
 
 const version string = "1.8.0"
diff --git a/ops/kontemplate/templater/templater.go b/ops/kontemplate/templater/templater.go
index a8f0c670a6..86cbad459f 100644
--- a/ops/kontemplate/templater/templater.go
+++ b/ops/kontemplate/templater/templater.go
@@ -20,7 +20,7 @@ import (
 	"strings"
 	"text/template"
 
-	"github.com/Masterminds/sprig"
+	"github.com/Masterminds/sprig/v3"
 	"github.com/tazjin/kontemplate/context"
 	"github.com/tazjin/kontemplate/util"
 )
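The sprig v3 migration above is purely an import-path change; the template API stays the same. As a minimal sketch of how a sprig function map plugs into `text/template` (a standalone example under assumed setup, not kontemplate's actual code):

```go
package main

import (
	"os"
	"text/template"

	"github.com/Masterminds/sprig/v3"
)

func main() {
	// TxtFuncMap exposes sprig's helpers (upper, b64enc, ...) to text/template,
	// which is how kontemplate-style templating gains its extra functions.
	tpl := template.Must(
		template.New("demo").Funcs(sprig.TxtFuncMap()).Parse(`{{ "hello" | upper }}`),
	)
	_ = tpl.Execute(os.Stdout, nil) // prints HELLO
}
```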
diff --git a/ops/machines/whitby/default.nix b/ops/machines/whitby/default.nix
index 6a8ee56abc..4c2fd0f2f5 100644
--- a/ops/machines/whitby/default.nix
+++ b/ops/machines/whitby/default.nix
@@ -25,6 +25,7 @@ in
     (mod "restic.nix")
     (mod "smtprelay.nix")
     (mod "sourcegraph.nix")
+    (mod "teleirc.nix")
     (mod "tvl-buildkite.nix")
     (mod "tvl-slapd/default.nix")
     (mod "tvl-users.nix")
@@ -232,6 +233,7 @@ in
       owothia.file = secretFile "owothia";
       panettone.file = secretFile "panettone";
       smtprelay.file = secretFile "smtprelay";
+      teleirc.file = secretFile "teleirc";
 
       buildkite-agent-token = {
         file = secretFile "buildkite-agent-token";
@@ -345,7 +347,12 @@ in
   # Start the Gerrit->IRC bot
   services.depot.clbot = {
     enable = true;
-    channels = [ "#tvix-dev" "#tvl" ];
+    channels = {
+      "#tvl" = { };
+      "#tvix-dev" = {
+        only_display = "tvix,nix-compat,third_party,third-party,3p";
+      };
+    };
 
     # See //fun/clbot for details.
     flags = {
@@ -410,6 +417,9 @@ in
       };
     };
 
+    # Run the Telegram<>IRC bridge for Volga Sprint.
+    teleirc.enable = true;
+
     # Run atward, the search engine redirection thing.
     atward.enable = true;
 
diff --git a/ops/modules/clbot.nix b/ops/modules/clbot.nix
index bdddff6c81..0a436a8749 100644
--- a/ops/modules/clbot.nix
+++ b/ops/modules/clbot.nix
@@ -7,6 +7,7 @@ let
 
   inherit (lib)
     listToAttrs
+    mapAttrsToList
     mkEnableOption
     mkIf
     mkOption
@@ -25,13 +26,13 @@ let
     ${pkgs.systemd}/bin/systemd-escape '${name}' >> $out
   ''));
 
-  mkUnit = flags: channel: {
+  mkUnit = channel: channelFlags: {
     name = "clbot-${systemdEscape channel}";
     value = {
       description = "${description} to ${channel}";
       wantedBy = [ "multi-user.target" ];
 
-      script = "${depot.fun.clbot}/bin/clbot ${mkFlags (cfg.flags // {
+      script = "${depot.fun.clbot}/bin/clbot ${mkFlags (cfg.flags // channelFlags // {
         irc_channel = channel;
       })} -alsologtostderr";
 
@@ -53,8 +54,8 @@ in
     };
 
     channels = mkOption {
-      type = with types; listOf str;
-      description = "Channels in which to post (generates one unit per channel)";
+      type = with types; attrsOf (attrsOf str);
+      description = "Channels in which to post (generates one unit per channel); nested attrs are used as extra flags to the service, which override the attrs in `flags`";
     };
 
     secretsFile = mkOption {
@@ -77,6 +78,6 @@ in
       };
     };
 
-    systemd.services = listToAttrs (map (mkUnit cfg.flags) cfg.channels);
+    systemd.services = listToAttrs (mapAttrsToList mkUnit cfg.channels);
   };
 }
diff --git a/ops/modules/teleirc.nix b/ops/modules/teleirc.nix
new file mode 100644
index 0000000000..9f9ac059ce
--- /dev/null
+++ b/ops/modules/teleirc.nix
@@ -0,0 +1,40 @@
+# Run the Telegram<>IRC sync bot for the Volga Sprint channel.
+#
+# This module is written in a pretty ad-hoc style, as it is sort of a
+# throwaway thing (will be removed again after the event).
+{ depot, config, lib, pkgs, ... }:
+
+let
+  cfg = config.services.depot.teleirc;
+  description = "IRC<>Telegram sync for Volga Sprint channel";
+  configFile = builtins.toFile "teleirc.env" ''
+    # connect through tvlbot's ZNC bouncer
+    IRC_SERVER="localhost"
+    IRC_PORT=2627
+    IRC_USE_SSL=false
+    IRC_CHANNEL="#volgasprint"
+    IRC_BLACKLIST="tvlbot"
+    IRC_BOT_NAME="tvlbot"
+    IRC_BOT_REALNAME="TVL bot for Volga Sprint"
+    IRC_BOT_IDENT="tvlbot"
+    IRC_SEND_STICKER_EMOJI=false # look into this
+    TELEGRAM_CHAT_ID=-1002153072030
+  '';
+in
+{
+  options.services.depot.teleirc.enable = lib.mkEnableOption description;
+
+  config = lib.mkIf cfg.enable {
+    systemd.services.teleirc = {
+      inherit description;
+      wantedBy = [ "multi-user.target" ];
+
+      serviceConfig = {
+        DynamicUser = true;
+        Restart = "always";
+        EnvironmentFile = "/run/agenix/teleirc";
+        ExecStart = "${depot.third_party.teleirc}/bin/teleirc -conf ${configFile}";
+      };
+    };
+  };
+}
diff --git a/ops/secrets/secrets.nix b/ops/secrets/secrets.nix
index 5cbf2bf612..bc32d23597 100644
--- a/ops/secrets/secrets.nix
+++ b/ops/secrets/secrets.nix
@@ -9,6 +9,12 @@ let
 
     # zamalek
     "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIDBRXeb8EuecLHP0bW4zuebXp4KRnXgJTZfeVWXQ1n1R"
+
+    # khamovnik
+    "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAID1ptE5HvGSXxSXo+aHBTKa5PBlAM1HqmpzWz0yAhHLj"
+
+    # arbat
+    "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIJ1Eai0p7eF7XML5wokqF4GlVZM+YXEORfs/GPGwEky7"
   ];
 
   aspen = [
@@ -47,6 +53,7 @@ in
   "owothia.age" = whitbyDefault;
   "panettone.age" = whitbyDefault;
   "smtprelay.age" = whitbyDefault;
+  "teleirc.age" = whitbyDefault;
   "tf-buildkite.age" = terraform;
   "tf-glesys.age" = terraform;
   "tf-keycloak.age" = terraform;
diff --git a/ops/secrets/teleirc.age b/ops/secrets/teleirc.age
new file mode 100644
index 0000000000..ebc88fc9ef
--- /dev/null
+++ b/ops/secrets/teleirc.age
Binary files differ
diff --git a/ops/terraform/deploy-nixos/README.md b/ops/terraform/deploy-nixos/README.md
index fd0bd1b442..2580a7c0e1 100644
--- a/ops/terraform/deploy-nixos/README.md
+++ b/ops/terraform/deploy-nixos/README.md
@@ -25,8 +25,17 @@ deploy is necessary.
 
 ```terraform
 module "deploy_somehost" {
+  # Clone just this directory through josh. Add a `ref=` parameter to pin to a specific commit.
   source              = "git::https://code.tvl.fyi/depot.git:/ops/terraform/deploy-nixos.git"
+
+  # The attribute path pointing to the expression to instantiate.
   attrpath            = "ops.nixos.somehost"
+
+  # The path to the Nix file to invoke. Optional.
+  # If omitted, will shell out to git to determine the repo root, and Nix will
+  # use `default.nix` in there.
+  entrypoint          = "${path.module}/../../somewhere.nix"
+
   target_host         = "somehost.tvl.su"
   target_user         = "someone"
   target_user_ssh_key = tls_private_key.somehost.private_key_pem
@@ -37,9 +46,6 @@ module "deploy_somehost" {
 
 Several things can be improved about this module, for example:
 
-* The repository root (relative to which the attribute path is evaluated) could
-  be made configurable.
-
 * The remote system closure could be discovered to restore remote system state
   after manual deploys on the target (i.e. "stomping" of changes).
 
diff --git a/ops/users/default.nix b/ops/users/default.nix
index c54a681dce..cd1ca4fc27 100644
--- a/ops/users/default.nix
+++ b/ops/users/default.nix
@@ -22,6 +22,11 @@
     password = "{ARGON2}$argon2id$v=19$m=65536,t=2,p=1$5NEYPJ19nDITK5sGr4bzhQ$Xzpzth6y4w+HGvioHiYgzqFiwMDx0B7HAh+PVbkRuuk";
   }
   {
+    username = "chickadee";
+    email = "matthewktromp@gmail.com";
+    password = "{ARGON2}$argon2id$v=19$m=19456,t=2,p=1$HoZjVdJ90JmTEJf1MMLuDg$5Pa8kpJdFVsIxgoOTDsH0gv6CLumSIkMqYEn5UVfjwU";
+  }
+  {
     username = "cschilling";
     email = "christian.schilling.de@gmail.com";
     password = "{ARGON2}$argon2id$v=19$m=65536,t=2,p=1$9VN3IS6ViW5FFbVKWOZI6Q$gZxuYAYk0Opq4E5i8cbcNjfznCQNc+RiP7Xv1CUnrQU";
@@ -229,4 +234,24 @@
     email = "tvl@alice-carroll.pet";
     password = "{ARGON2}$argon2id$v=19$m=19456,t=2,p=1$mt/0RzKw4RHxm7ybpMHP5Q$P/SDBMv5si9D98NFO/eZgh2+InlByqYxqAvQWhl+p0c";
   }
+  {
+    username = "yuka";
+    email = "tvl@yuka.dev";
+    password = "{ARGON2}$argon2id$v=19$m=65536,t=2,p=1$aEyiAIuynQMwfY7xE+pMxg$QdghylHO2JZMR/YyYf4UAnhhb/gBdAkoDeANEwdixxU";
+  }
+  {
+    username = "benjaminedwardwebb";
+    email = "benjaminedwardwebb@gmail.com";
+    password = "{ARGON2}$argon2id$v=19$m=19456,t=2,p=1$kdFNmxgIGsF8TkB/GoPy1A$GUXd3M35Jqxqlfra4gPCcFW3ehE0RVrlHOzaoD7Pu7s";
+  }
+  {
+    username = "fmzakari";
+    email = "farid.m.zakaria@gmail.com";
+    password = "{ARGON2}$argon2id$v=19$m=19456,t=2,p=1$NzSX6x2+mApMvhNrvVIWaQ$/GUwbj+6GUyJL8XSgxTThc3TmVTM4WLQ+6KMC4NwovE";
+  }
+  {
+    username = "toastal";
+    email = "toastal@posteo.net";
+    password = "{ARGON2}$argon2id$v=19$m=19456,t=2,p=1$txwVjPn9kKPUgsZnPtpyaA$pE0ISDGScCE4JCKcmbnzC+GZZ4PP6MqKJKmR/sxo6TY";
+  }
 ]
diff --git a/third_party/exwm/exwm-core.el b/third_party/exwm/exwm-core.el
index e0d644d941..a7fdfce710 100644
--- a/third_party/exwm/exwm-core.el
+++ b/third_party/exwm/exwm-core.el
@@ -82,6 +82,7 @@ Here are some predefined candidates:
 (defvar exwm-input--simulation-keys)
 (defvar exwm-input-line-mode-passthrough)
 (defvar exwm-input-prefix-keys)
+(defvar exwm-workspace--list)
 (declare-function exwm-input--fake-key "exwm-input.el" (event))
 (declare-function exwm-input--on-KeyPress-line-mode "exwm-input.el"
                   (key-press raw-data))
@@ -94,6 +95,8 @@ Here are some predefined candidates:
 (declare-function exwm-manage--kill-buffer-query-function "exwm-manage.el")
 (declare-function exwm-workspace-move-window "exwm-workspace.el"
                   (frame-or-index &optional id))
+(declare-function exwm-workspace-switch "exwm-workspace.el"
+                  (frame-or-index &optional force))
 
 (define-minor-mode exwm-debug
   "Debug-logging enabled if non-nil."
@@ -229,6 +232,14 @@ If CONN is non-nil, use it instead of the value of the variable
       (setq ret-depth depth))
     (list ret-visual ret-depth ret-colormap)))
 
+(defun exwm--mode-name ()
+  "Mode name string used in `exwm-mode' buffers."
+  (let ((name "EXWM"))
+    (if (cl-some (lambda (i) (frame-parameter i 'exwm-urgency))
+                 exwm-workspace--list)
+        (propertize name 'face 'font-lock-warning-face)
+      name)))
+
 ;; Internal variables
 (defvar-local exwm--id nil)               ;window ID
 (defvar-local exwm--configurations nil)   ;initial configurations.
@@ -311,7 +322,7 @@ One of `line-mode' or `char-mode'.")
 ;; Also, inactive entries should be disabled rather than hidden.
 (easy-menu-define exwm-mode-menu exwm-mode-map
   "Menu for `exwm-mode'."
-  '("EXWM"
+  `("EXWM"
     "---"
     "*General*"
     "---"
@@ -336,22 +347,20 @@ One of `line-mode' or `char-mode'.")
     ["Send key" exwm-input-send-next-key (eq exwm--input-mode 'line-mode)]
     ;; This is merely a reference.
     ("Send simulation key" :filter
-     (lambda (&rest _args)
-       (let (result)
-         (maphash
-          (lambda (key value)
-            (when (sequencep key)
-              (setq result (append result
-                                   `([
-                                      ,(format "Send '%s'"
-                                               (key-description value))
-                                      (lambda ()
-                                        (interactive)
-                                        (dolist (i ',value)
-                                          (exwm-input--fake-key i)))
-                                      :keys ,(key-description key)])))))
-          exwm-input--simulation-keys)
-         result)))
+     ,(lambda (&rest _args)
+        (let (result)
+          (maphash
+           (lambda (key value)
+             (when (sequencep key)
+               (setq result (append result
+                                    `([,(format "Send '%s'"
+                                                (key-description value))
+                                       ,(lambda ()
+                                          (interactive)
+                                          (mapc #'exwm-input--fake-key value))
+                                       :keys ,(key-description key)])))))
+           exwm-input--simulation-keys)
+          result)))
 
     ["Define global binding" exwm-input-set-key]
 
@@ -368,26 +377,20 @@ One of `line-mode' or `char-mode'.")
     ["Switch workspace" exwm-workspace-switch]
     ;; Place this entry at bottom to avoid selecting others by accident.
     ("Switch to" :filter
-     (lambda (&rest _args)
-       (mapcar (lambda (i)
-                 `[,(format "Workspace %d" i)
-                   (lambda ()
-                     (interactive)
-                     (exwm-workspace-switch ,i))
-                   (/= ,i exwm-workspace-current-index)])
-               (number-sequence 0 (1- (exwm-workspace--count))))))))
+     ,(lambda (&rest _args)
+        (mapcar (lambda (i)
+                  `[,(format "Workspace %d" i)
+                    ,(lambda ()
+                       (interactive)
+                       (exwm-workspace-switch i))
+                    (/= ,i exwm-workspace-current-index)])
+                (number-sequence 0 (1- (length exwm-workspace--list))))))))
 
 (define-derived-mode exwm-mode nil "EXWM"
   "Major mode for managing X windows.
 
 \\{exwm-mode-map}"
-  ;;
-  (setq mode-name
-        '(:eval (propertize "EXWM" 'face
-                            (when (cl-some (lambda (i)
-                                             (frame-parameter i 'exwm-urgency))
-                                           exwm-workspace--list)
-                              'font-lock-warning-face))))
+  :interactive nil :abbrev-table nil :syntax-table nil
   ;; Change major-mode is not allowed
   (add-hook 'change-major-mode-hook #'kill-buffer nil t)
   ;; Kill buffer -> close window
@@ -396,7 +399,8 @@ One of `line-mode' or `char-mode'.")
   ;; Redirect events when executing keyboard macros.
   (push `(executing-kbd-macro . ,exwm--kmacro-map)
         minor-mode-overriding-map-alist)
-  (setq buffer-read-only t
+  (setq mode-name '(:eval (exwm--mode-name))
+        buffer-read-only t
         cursor-type nil
         left-margin-width nil
         right-margin-width nil
diff --git a/third_party/exwm/exwm-floating.el b/third_party/exwm/exwm-floating.el
index 34d06a30db..574a78f015 100644
--- a/third_party/exwm/exwm-floating.el
+++ b/third_party/exwm/exwm-floating.el
@@ -67,11 +67,11 @@ This hook runs in the context of the corresponding buffer."
 
 (defcustom exwm-floating-border-width 1
   "Border width of floating windows."
-  :type '(integer
-          :validate (lambda (widget)
-                      (when (< (widget-value widget) 0)
-                        (widget-put widget :error "Border width is at least 0")
-                        widget)))
+  :type `(integer
+          :validate ,(lambda (widget)
+                       (when (< (widget-value widget) 0)
+                         (widget-put widget :error "Border width is at least 0")
+                         widget)))
   :initialize #'custom-initialize-default
   :set (lambda (symbol value)
          (let ((delta (- value exwm-floating-border-width))
diff --git a/third_party/exwm/exwm-input.el b/third_party/exwm/exwm-input.el
index f1f035c91a..eac0ef6a37 100644
--- a/third_party/exwm/exwm-input.el
+++ b/third_party/exwm/exwm-input.el
@@ -46,7 +46,7 @@
   '(?\C-x ?\C-u ?\C-h ?\M-x ?\M-` ?\M-& ?\M-:)
   "List of prefix keys EXWM should forward to Emacs when in `line-mode'.
 
-The point is to make keys like 'C-x C-f' forwarded to Emacs in `line-mode'.
+The point is to have keys like `C-x C-f' forwarded to Emacs in `line-mode'.
 There is no need to add prefix keys for global/simulation keys or those
 defined in `exwm-mode-map' here."
   :type '(repeat key-sequence)
diff --git a/third_party/exwm/exwm-layout.el b/third_party/exwm/exwm-layout.el
index 8649c11ffd..83421b2e99 100644
--- a/third_party/exwm/exwm-layout.el
+++ b/third_party/exwm/exwm-layout.el
@@ -602,9 +602,7 @@ See also `exwm-layout-enlarge-window'."
   ;; Auto refresh layout
   (exwm--log)
   (add-hook 'window-configuration-change-hook #'exwm-layout--refresh)
-  ;; The behavior of `window-configuration-change-hook' will be changed.
-  (when (fboundp 'window-pixel-width-before-size-change)
-    (add-hook 'window-size-change-functions #'exwm-layout--refresh))
+  (add-hook 'window-size-change-functions #'exwm-layout--refresh)
   (unless (exwm-workspace--minibuffer-own-frame-p)
     ;; Refresh when minibuffer grows
     (add-hook 'minibuffer-setup-hook #'exwm-layout--on-minibuffer-setup t)
@@ -616,8 +614,7 @@ See also `exwm-layout-enlarge-window'."
   "Exit the layout module."
   (exwm--log)
   (remove-hook 'window-configuration-change-hook #'exwm-layout--refresh)
-  (when (fboundp 'window-pixel-width-before-size-change)
-    (remove-hook 'window-size-change-functions #'exwm-layout--refresh))
+  (remove-hook 'window-size-change-functions #'exwm-layout--refresh)
   (remove-hook 'minibuffer-setup-hook #'exwm-layout--on-minibuffer-setup)
   (when exwm-layout--timer
     (cancel-timer exwm-layout--timer)
diff --git a/third_party/exwm/exwm-systemtray.el b/third_party/exwm/exwm-systemtray.el
index 9e57dae4eb..2b46568152 100644
--- a/third_party/exwm/exwm-systemtray.el
+++ b/third_party/exwm/exwm-systemtray.el
@@ -46,15 +46,6 @@
    (visible :initarg :visible))
   :documentation "Attributes of a system tray icon.")
 
-(defclass xcb:systemtray:-ClientMessage
-  (xcb:icccm:--ClientMessage xcb:ClientMessage)
-  ((format :initform 32)
-   (type :initform 'xcb:Atom:MANAGER)
-   (time :initarg :time :type xcb:TIMESTAMP)      ;new slot
-   (selection :initarg :selection :type xcb:ATOM) ;new slot
-   (owner :initarg :owner :type xcb:WINDOW))      ;new slot
-  :documentation "A systemtray client message.")
-
 (defgroup exwm-systemtray nil
   "System tray."
   :group 'exwm)
@@ -542,7 +533,7 @@ Argument DATA contains the raw event data."
                        :destination exwm--root
                        :event-mask xcb:EventMask:StructureNotify
                        :event (xcb:marshal
-                               (make-instance 'xcb:systemtray:-ClientMessage
+                               (make-instance 'xcb:icccm:-ManagerSelection
                                               :window exwm--root
                                               :time xcb:Time:CurrentTime
                                               :selection
diff --git a/third_party/exwm/exwm-workspace.el b/third_party/exwm/exwm-workspace.el
index 89be697159..9337dc08ab 100644
--- a/third_party/exwm/exwm-workspace.el
+++ b/third_party/exwm/exwm-workspace.el
@@ -1257,12 +1257,10 @@ ALIST is an action alist, as accepted by function `display-buffer'."
   ;;        fail to retrieve the correct window.  It's likely there are
   ;;        other related issues.
   ;; This is not required by Emacs 24.
-  (when (fboundp 'window-preserve-size)
-    (let ((window (get-buffer-window "*Completions*"
-                                     exwm-workspace--current)))
-      (when window
-        (fit-window-to-buffer window)
-        (window-preserve-size window)))))
+  (let ((window (get-buffer-window "*Completions*" exwm-workspace--current)))
+    (when window
+      (fit-window-to-buffer window)
+      (window-preserve-size window))))
 
 (defun exwm-workspace--on-minibuffer-exit ()
   "Run in `minibuffer-exit-hook' to hide the minibuffer container."
diff --git a/third_party/exwm/exwm-xsettings.el b/third_party/exwm/exwm-xsettings.el
index 99d6b9c4ac..596588b823 100644
--- a/third_party/exwm/exwm-xsettings.el
+++ b/third_party/exwm/exwm-xsettings.el
@@ -293,7 +293,7 @@ SERIAL is a sequence number."
                        :destination exwm--root
                        :event-mask xcb:EventMask:StructureNotify
                        :event (xcb:marshal
-                               (make-instance 'xcb:xsettings:-ClientMessage
+                               (make-instance 'xcb:icccm:-ManagerSelection
                                               :window exwm--root
                                               :time xcb:Time:CurrentTime
                                               :selection exwm-xsettings--XSETTINGS_S0-atom
diff --git a/third_party/exwm/exwm.el b/third_party/exwm/exwm.el
index c4900eab48..1186a40f44 100644
--- a/third_party/exwm/exwm.el
+++ b/third_party/exwm/exwm.el
@@ -4,8 +4,8 @@
 
 ;; Author: Chris Feng <chris.w.feng@gmail.com>
 ;; Maintainer: Adrián Medraño Calvo <adrian@medranocalvo.com>, Steven Allen <steven@stebalien.com>, Daniel Mendler <mail@daniel-mendler.de>
-;; Version: 0.28
-;; Package-Requires: ((emacs "27.1") (xelb "0.18"))
+;; Version: 0.30
+;; Package-Requires: ((emacs "27.1") (xelb "0.19"))
 ;; Keywords: unix
 ;; URL: https://github.com/emacs-exwm/exwm
 
@@ -493,23 +493,20 @@ RAW-DATA contains unmarshalled ClientMessage event data."
      ;; _NET_ACTIVE_WINDOW.
      ((= type xcb:Atom:_NET_ACTIVE_WINDOW)
       (let ((buffer (exwm--id->buffer id))
-            iconic window)
+            window)
         (if (buffer-live-p buffer)
           ;; Either an `exwm-mode' buffer (an X window) or a floating frame.
           (with-current-buffer buffer
             (when (eq exwm--frame exwm-workspace--current)
               (if exwm--floating-frame
                   (select-frame exwm--floating-frame)
-                (setq iconic (exwm-layout--iconic-state-p))
-                (when iconic
+                (setq window (get-buffer-window nil t))
+                (unless window
                   ;; State change: iconic => normal.
-                  (set-window-buffer (frame-selected-window exwm--frame)
-                                     (current-buffer)))
+                  (setq window (frame-selected-window exwm--frame))
+                  (set-window-buffer window (current-buffer)))
                 ;; Focus transfer.
-                (setq window (get-buffer-window nil t))
-                (when (or iconic
-                          (not (eq window (selected-window))))
-                  (select-window window)))))
+                (select-window window))))
           ;; A workspace.
           (dolist (f exwm-workspace--list)
             (when (eq id (frame-parameter f 'exwm-outer-id))
diff --git a/third_party/geesefs/default.nix b/third_party/geesefs/default.nix
deleted file mode 100644
index 98448bb737..0000000000
--- a/third_party/geesefs/default.nix
+++ /dev/null
@@ -1,25 +0,0 @@
-# Finally, a good FUSE FS implementation over S3.
-# https://github.com/yandex-cloud/geesefs
-
-{ pkgs, ... }:
-
-pkgs.buildGoModule rec {
-  pname = "geesefs";
-  version = "0.40.1";
-
-  src = pkgs.fetchFromGitHub {
-    owner = "yandex-cloud";
-    repo = "geesefs";
-    rev = "v${version}";
-    hash = "sha256:0ig8h17z8n5j8qb7k2jyh40vv77zazhnz8bxdam9xihxksj8mizp";
-  };
-
-  subPackages = [ "." ];
-  buildInputs = [ pkgs.fuse ];
-  vendorHash = "sha256:11i7cmnlxi00d0csgpv8drfcw0aqshwc4hfs0jw7zwafdhnlyy0j";
-
-  meta = with pkgs.lib; {
-    license = licenses.asl20;
-    maintainers = [ maintainers.tazjin ];
-  };
-}
diff --git a/third_party/nixpkgs/default.nix b/third_party/nixpkgs/default.nix
index b79a963e5c..03dc7b267c 100644
--- a/third_party/nixpkgs/default.nix
+++ b/third_party/nixpkgs/default.nix
@@ -52,7 +52,13 @@ let
   # Overlay for packages that should come from the stable channel
   # instead (e.g. because something is broken in unstable).
   # Use `stableNixpkgs` from above.
-  stableOverlay = _unstableSelf: unstableSuper: { };
+  stableOverlay = _unstableSelf: unstableSuper: {
+    # newer trunk fails somewhere within reqwest, trying to read a mystery file
+    trunk = stableNixpkgs.trunk;
+
+    # the big lisp package change breaks everything in //3p/lisp, undo it for now.
+    lispPackages = stableNixpkgs.lispPackages;
+  };
 
   # Overlay to expose the nixpkgs commits we are using to other Nix code.
   commitsOverlay = _: _: {
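
For readers unfamiliar with the overlay mechanics above: an overlay is a function from the final package set and its predecessor to an attribute set of replacements, and the attributes it returns shadow the unstable definitions when nixpkgs is imported. A minimal sketch of that composition, assuming a hypothetical `unstableSrc` tarball and reusing the `stableOverlay` defined in the hunk (the depot's actual import wiring lives elsewhere in this file):

    # Sketch only: overlays are applied at import time, so `trunk` and
    # `lispPackages` returned by stableOverlay replace their unstable
    # counterparts in the resulting package set.
    import unstableSrc {
      overlays = [ stableOverlay commitsOverlay ];
    }
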
diff --git a/third_party/overlays/patches/crate2nix-drop-darwin-explicit-dontstrip.patch b/third_party/overlays/patches/crate2nix-drop-darwin-explicit-dontstrip.patch
new file mode 100644
index 0000000000..ba64e660c0
--- /dev/null
+++ b/third_party/overlays/patches/crate2nix-drop-darwin-explicit-dontstrip.patch
@@ -0,0 +1,22 @@
+From 0209f258cda8a9972a785e26d92fb477ce4d1b0e Mon Sep 17 00:00:00 2001
+From: Ilan Joselevich <personal@ilanjoselevich.com>
+Date: Tue, 11 Jun 2024 18:14:06 +0300
+Subject: [PATCH] Get rid of dontStrip for Darwin as it's no longer needed
+
+Fixed in https://github.com/NixOS/nixpkgs/pull/255900
+---
+ templates/nix/crate2nix/default.nix                  | 2 --
+
+diff --git a/templates/nix/crate2nix/default.nix b/templates/nix/crate2nix/default.nix
+index 95d3730f..c53925e7 100644
+--- a/templates/nix/crate2nix/default.nix
++++ b/templates/nix/crate2nix/default.nix
+@@ -349,8 +349,6 @@ rec {
+           buildRustCrateForPkgsFunc pkgs
+             (
+               crateConfig // {
+-                # https://github.com/NixOS/nixpkgs/issues/218712
+-                dontStrip = stdenv.hostPlatform.isDarwin;
+                 src = crateConfig.src or (
+                   pkgs.fetchurl rec {
+                     name = "${crateConfig.crateName}-${crateConfig.version}.tar.gz";
diff --git a/third_party/overlays/tvl.nix b/third_party/overlays/tvl.nix
index b54e899b88..f2260be8b8 100644
--- a/third_party/overlays/tvl.nix
+++ b/third_party/overlays/tvl.nix
@@ -21,17 +21,6 @@ depot.nix.readTree.drvTargets {
     withAWS = false;
   });
 
-  # To match telega in emacs-overlay or wherever
-  tdlib = super.tdlib.overrideAttrs (_: {
-    version = "1.8.24";
-    src = self.fetchFromGitHub {
-      owner = "tdlib";
-      repo = "td";
-      rev = "d79bd4b69403868897496da39b773ab25c69f6af";
-      sha256 = "0bc5akzw12qwj45rzqkrhw65qlrn9q8pzmvc5aiqv4bvhkb1ghl0";
-    };
-  });
-
   home-manager = super.home-manager.overrideAttrs (_: {
     src = depot.third_party.sources.home-manager;
     version = "git-"
@@ -69,6 +58,17 @@ depot.nix.readTree.drvTargets {
           sha256 = "1mmlrd2zpcwiv8gh10y7lrpflnbmsycdascrxjr3bfcwa8yx7901";
         };
       };
+
+      # Override telega sources until MELPA updates in nixpkgs resume.
+      telega = esuper.telega.overrideAttrs (_: {
+        version = "0.8.291"; # unstable
+        src = self.fetchFromGitHub {
+          owner = "zevlg";
+          repo = "telega.el";
+          rev = "58b4963b292ceb723d665df100b519eb5a99c676";
+          sha256 = "1q3ydbm0jhrsyvvdn0mpmxvskq0l53jkh40a5hlx7i3qkinbhbry";
+        };
+      });
     })
   );
 
@@ -101,6 +101,8 @@ depot.nix.readTree.drvTargets {
     patches = old.patches or [ ] ++ [
       # https://github.com/nix-community/crate2nix/pull/301
       ./patches/crate2nix-tests-debug.patch
+      # TODO(Kranzes): drop on next release
+      ./patches/crate2nix-drop-darwin-explicit-dontstrip.patch
     ];
   });
 
@@ -120,4 +122,17 @@ depot.nix.readTree.drvTargets {
   tpm2-pkcs11 = super.tpm2-pkcs11.overrideAttrs (old: {
     patches = (old.patches or [ ]) ++ [ ./patches/tpm2-pkcs11-190-dbupgrade.patch ];
   });
+
+  # macFUSE bump containing fix for https://github.com/osxfuse/osxfuse/issues/974
+  # https://github.com/NixOS/nixpkgs/pull/320197
+  fuse =
+    if super.stdenv.isDarwin then
+      super.fuse.overrideAttrs
+        (old: rec {
+          version = "4.8.0";
+          src = super.fetchurl {
+            url = "https://github.com/osxfuse/osxfuse/releases/download/macfuse-${version}/macfuse-${version}.dmg";
+            hash = "sha256-ucTzO2qdN4QkowMVvC3+4pjEVjbwMsB0xFk+bvQxwtQ=";
+          };
+        }) else super.fuse;
 }
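
A small Nix subtlety visible in the hunks above: `old.patches or [ ] ++ [ ... ]` (crate2nix) and `(old.patches or [ ]) ++ [ ... ]` (tpm2-pkcs11) are equivalent, since `or` belongs to the attribute-selection expression and binds more tightly than `++`. A two-line sketch with a hypothetical `./a.patch`:

    # Both spellings append to the existing patch list, or to [] when unset:
    patches = old.patches or [ ] ++ [ ./a.patch ];    # parses as (old.patches or [ ]) ++ [ ./a.patch ]
    patches = (old.patches or [ ]) ++ [ ./a.patch ];  # explicit parentheses, same result
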
diff --git a/third_party/sources/sources.json b/third_party/sources/sources.json
index 109451ff51..8844eddc9c 100644
--- a/third_party/sources/sources.json
+++ b/third_party/sources/sources.json
@@ -5,10 +5,10 @@
         "homepage": "https://matrix.to/#/#agenix:nixos.org",
         "owner": "ryantm",
         "repo": "agenix",
-        "rev": "0.15.0",
-        "sha256": "01dhrghwa7zw93cybvx4gnrskqk97b004nfxgsys0736823956la",
+        "rev": "c2fc0762bbe8feb06a2e59a364fa81b3a57671c9",
+        "sha256": "1lpkwinlax40b7xgzspbkm9rsi4a1x48hxhixnni4irxxwnav0ah",
         "type": "tarball",
-        "url": "https://github.com/ryantm/agenix/archive/0.15.0.tar.gz",
+        "url": "https://github.com/ryantm/agenix/archive/c2fc0762bbe8feb06a2e59a364fa81b3a57671c9.tar.gz",
         "url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
     },
     "home-manager": {
@@ -17,10 +17,10 @@
         "homepage": "https://nix-community.github.io/home-manager/",
         "owner": "nix-community",
         "repo": "home-manager",
-        "rev": "c1609d584a6b5e9e6a02010f51bd368cb4782f8e",
-        "sha256": "112r86p3iah1xahwlp82yd3gvh10wkf271za5h7v3jsqv08c6gkr",
+        "rev": "a7117efb3725e6197dd95424136f79147aa35e5b",
+        "sha256": "02q3ck1hjs8xzdhfikqxrnsfs9vh4p7rmdha3vbp6nkkdbdvhgg7",
         "type": "tarball",
-        "url": "https://github.com/nix-community/home-manager/archive/c1609d584a6b5e9e6a02010f51bd368cb4782f8e.tar.gz",
+        "url": "https://github.com/nix-community/home-manager/archive/a7117efb3725e6197dd95424136f79147aa35e5b.tar.gz",
         "url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
     },
     "impermanence": {
@@ -41,10 +41,10 @@
         "homepage": "",
         "owner": "nmattia",
         "repo": "naersk",
-        "rev": "c5037590290c6c7dae2e42e7da1e247e54ed2d49",
-        "sha256": "1ql5ziwfrpmc8cxhgflmdy2z06z4dsdfzjwb2vv9bag6a2chrvq8",
+        "rev": "fa19d8c135e776dc97f4dcca08656a0eeb28d5c0",
+        "sha256": "1mif058gcbw5d5yixsmzalqlr0h9m9mmbsgv8v4r2mmsbw83k2x0",
         "type": "tarball",
-        "url": "https://github.com/nmattia/naersk/archive/c5037590290c6c7dae2e42e7da1e247e54ed2d49.tar.gz",
+        "url": "https://github.com/nmattia/naersk/archive/fa19d8c135e776dc97f4dcca08656a0eeb28d5c0.tar.gz",
         "url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
     },
     "napalm": {
@@ -65,10 +65,10 @@
         "homepage": "",
         "owner": "NixOS",
         "repo": "nixpkgs",
-        "rev": "7bb2ccd8cdc44c91edba16c48d2c8f331fb3d856",
-        "sha256": "0ijqx995jw9i16f28whyjdll9b0nydmyl4n91bci2cgryxms7f8f",
+        "rev": "051f920625ab5aabe37c920346e3e69d7d34400e",
+        "sha256": "08lin51g5x2vv89rs6vmqxnyy8pfysh0wdp6mdxw6l86dpm2rbg2",
         "type": "tarball",
-        "url": "https://github.com/NixOS/nixpkgs/archive/7bb2ccd8cdc44c91edba16c48d2c8f331fb3d856.tar.gz",
+        "url": "https://github.com/NixOS/nixpkgs/archive/051f920625ab5aabe37c920346e3e69d7d34400e.tar.gz",
         "url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
     },
     "nixpkgs-stable": {
@@ -77,10 +77,10 @@
         "homepage": "",
         "owner": "NixOS",
         "repo": "nixpkgs",
-        "rev": "dd37924974b9202f8226ed5d74a252a9785aedf8",
-        "sha256": "1nxd4dqci8rs94a7cypx30axgj778p2wydkx16q298n29crkflbw",
+        "rev": "a2e1d0414259a144ebdc048408a807e69e0565af",
+        "sha256": "1jv90bz3s7j294fhpb29k735fg3xfs9z848szicqarpbz7wfg03g",
         "type": "tarball",
-        "url": "https://github.com/NixOS/nixpkgs/archive/dd37924974b9202f8226ed5d74a252a9785aedf8.tar.gz",
+        "url": "https://github.com/NixOS/nixpkgs/archive/a2e1d0414259a144ebdc048408a807e69e0565af.tar.gz",
         "url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
     },
     "rust-overlay": {
@@ -89,10 +89,10 @@
         "homepage": "",
         "owner": "oxalica",
         "repo": "rust-overlay",
-        "rev": "2a42c742ab04b61d9b2f1edf392842cf9f27ebfd",
-        "sha256": "1wpkca75ysb2ssycc0dshd1m76q8iqhzrrbr6xmfmkkcj1p333nk",
+        "rev": "6dc3e45fe4aee36efeed24d64fc68b1f989d5465",
+        "sha256": "0vqgkzbfdj920lbm1dy8kylrv2gk4ard38lb3i20xvp2mp1d39n2",
         "type": "tarball",
-        "url": "https://github.com/oxalica/rust-overlay/archive/2a42c742ab04b61d9b2f1edf392842cf9f27ebfd.tar.gz",
+        "url": "https://github.com/oxalica/rust-overlay/archive/6dc3e45fe4aee36efeed24d64fc68b1f989d5465.tar.gz",
         "url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
     },
     "rustsec-advisory-db": {
@@ -101,10 +101,10 @@
         "homepage": "https://rustsec.org",
         "owner": "RustSec",
         "repo": "advisory-db",
-        "rev": "35e7459a331d3e0c585e56dabd03006b9b354088",
-        "sha256": "1j8c0vzwg6b9lxmdy2a40pvwsy2kncv455spbjbxsj10p2vmy5fl",
+        "rev": "af76d4423761499f954411bb3071dcc72e6b0450",
+        "sha256": "167qxr66j638km3z7zk2drjdr4bgqz77hr35vkwdp0lbafmd6y1c",
         "type": "tarball",
-        "url": "https://github.com/RustSec/advisory-db/archive/35e7459a331d3e0c585e56dabd03006b9b354088.tar.gz",
+        "url": "https://github.com/RustSec/advisory-db/archive/af76d4423761499f954411bb3071dcc72e6b0450.tar.gz",
         "url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
     }
 }
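
Each entry above pins a tarball by commit and hash; consumers fetch the `url` and verify the `sha256`. A hedged sketch of the consuming side (the conventional niv-style shim, not necessarily this repo's exact one), using the nixpkgs values from this diff:

    # Rough shape of how a pinned entry is realised:
    nixpkgs = builtins.fetchTarball {
      url = "https://github.com/NixOS/nixpkgs/archive/051f920625ab5aabe37c920346e3e69d7d34400e.tar.gz";
      sha256 = "08lin51g5x2vv89rs6vmqxnyy8pfysh0wdp6mdxw6l86dpm2rbg2";
    };
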
diff --git a/third_party/teleirc/default.nix b/third_party/teleirc/default.nix
new file mode 100644
index 0000000000..8791511002
--- /dev/null
+++ b/third_party/teleirc/default.nix
@@ -0,0 +1,23 @@
+{ pkgs, lib, ... }:
+
+pkgs.buildGoModule rec {
+  name = "teleirc";
+  version = "2.3.0-4";
+
+  src = pkgs.fetchFromGitHub {
+    owner = "tvlfyi";
+    repo = "teleirc";
+    rev = "356ed1450840822172e7dff57965cc5371f63454";
+    sha256 = "0s6rlixks7lar9js4q1drg742cy2p4n8l4pmlzjmskl5d04c15gq";
+  };
+
+  vendorHash = "sha256:06f2wyxbphj73wknpp6dsn7rb4yhvdl6x0gj729cns7r4bsviscs";
+  ldflags = [ "-s" "-w" "-X" "main.version=${version}" ];
+  postInstall = "mv $out/bin/cmd $out/bin/teleirc";
+
+  meta = with lib; {
+    description = "IRC/Telegram bridge";
+    homepage = "https://docs.teleirc.com/en/latest/";
+    license = licenses.gpl3;
+  };
+}
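
One practical note on derivations like this: when `rev` is bumped, both `sha256` and `vendorHash` must be recomputed. A common trick (general nixpkgs practice, not tooling specific to this tree) is to substitute fake hashes and copy the real ones from the resulting hash-mismatch errors:

    # Sketch of the usual update flow, not part of the commit above:
    src = pkgs.fetchFromGitHub {
      owner = "tvlfyi";
      repo = "teleirc";
      rev = "<new-rev>";              # placeholder
      sha256 = pkgs.lib.fakeSha256;   # build once; copy the hash Nix reports
    };
    vendorHash = pkgs.lib.fakeHash;   # same trick for the vendored Go modules
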
diff --git a/tvix/Cargo.lock b/tvix/Cargo.lock
index 92229302dd..ab7eec0b73 100644
--- a/tvix/Cargo.lock
+++ b/tvix/Cargo.lock
@@ -310,13 +310,13 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "3b829e4e32b91e643de6eafe82b1d90675f5874230191a4ffbc1b336dec4d6bf"
 dependencies = [
  "async-trait",
- "axum-core 0.3.4",
+ "axum-core",
  "bitflags 1.3.2",
  "bytes",
  "futures-util",
- "http 0.2.11",
- "http-body 0.4.6",
- "hyper 0.14.28",
+ "http",
+ "http-body",
+ "hyper",
  "itoa",
  "matchit",
  "memchr",
@@ -332,40 +332,6 @@ dependencies = [
 ]
 
 [[package]]
-name = "axum"
-version = "0.7.4"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1236b4b292f6c4d6dc34604bb5120d85c3fe1d1aa596bd5cc52ca054d13e7b9e"
-dependencies = [
- "async-trait",
- "axum-core 0.4.3",
- "bytes",
- "futures-util",
- "http 1.1.0",
- "http-body 1.0.0",
- "http-body-util",
- "hyper 1.3.1",
- "hyper-util",
- "itoa",
- "matchit",
- "memchr",
- "mime",
- "percent-encoding",
- "pin-project-lite",
- "rustversion",
- "serde",
- "serde_json",
- "serde_path_to_error",
- "serde_urlencoded",
- "sync_wrapper",
- "tokio",
- "tower",
- "tower-layer",
- "tower-service",
- "tracing",
-]
-
-[[package]]
 name = "axum-core"
 version = "0.3.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -374,33 +340,12 @@ dependencies = [
  "async-trait",
  "bytes",
  "futures-util",
- "http 0.2.11",
- "http-body 0.4.6",
- "mime",
- "rustversion",
- "tower-layer",
- "tower-service",
-]
-
-[[package]]
-name = "axum-core"
-version = "0.4.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a15c63fd72d41492dc4f497196f5da1fb04fb7529e631d73630d1b491e47a2e3"
-dependencies = [
- "async-trait",
- "bytes",
- "futures-util",
- "http 1.1.0",
- "http-body 1.0.0",
- "http-body-util",
+ "http",
+ "http-body",
  "mime",
- "pin-project-lite",
  "rustversion",
- "sync_wrapper",
  "tower-layer",
  "tower-service",
- "tracing",
 ]
 
 [[package]]
@@ -436,9 +381,9 @@ version = "0.2.9"
 source = "git+https://github.com/flokli/bigtable_rs?rev=0af404741dfc40eb9fa99cf4d4140a09c5c20df7#0af404741dfc40eb9fa99cf4d4140a09c5c20df7"
 dependencies = [
  "gcp_auth",
- "http 0.2.11",
+ "http",
  "log",
- "prost 0.12.3",
+ "prost",
  "prost-build",
  "prost-types",
  "prost-wkt",
@@ -448,7 +393,7 @@ dependencies = [
  "serde_with",
  "thiserror",
  "tokio",
- "tonic 0.11.0",
+ "tonic",
  "tonic-build",
  "tower",
 ]
@@ -620,7 +565,7 @@ dependencies = [
  "num-traits",
  "serde",
  "wasm-bindgen",
- "windows-targets 0.52.0",
+ "windows-targets 0.52.5",
 ]
 
 [[package]]
@@ -733,6 +678,19 @@ dependencies = [
 ]
 
 [[package]]
+name = "console"
+version = "0.15.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0e1f83fc076bd6dd27517eacdf25fef6c4dfe5f1d7448bafaaf3a26f13b5e4eb"
+dependencies = [
+ "encode_unicode",
+ "lazy_static",
+ "libc",
+ "unicode-width",
+ "windows-sys 0.52.0",
+]
+
+[[package]]
 name = "const-oid"
 version = "0.9.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -1063,6 +1021,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "a26ae43d7bcc3b814de94796a5e736d4029efb0ee900c12e2d54c993ad1a1e07"
 
 [[package]]
+name = "encode_unicode"
+version = "0.3.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a357d28ed41a50f9c765dbfe56cbc04a64e53e5fc58ba79fbc34c10ef3df831f"
+
+[[package]]
 name = "encoding_rs"
 version = "0.8.33"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -1406,7 +1370,7 @@ dependencies = [
  "base64",
  "chrono",
  "home",
- "hyper 0.14.28",
+ "hyper",
  "hyper-rustls",
  "ring",
  "rustls 0.21.12",
@@ -1437,6 +1401,20 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "0b32dfe1fdfc0bbde1f22a5da25355514b5e450c33a6af6770884c8750aedfbc"
 
 [[package]]
+name = "generator"
+version = "0.8.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "186014d53bc231d0090ef8d6f03e0920c54d85a5ed22f4f2f74315ec56cf83fb"
+dependencies = [
+ "cc",
+ "cfg-if",
+ "libc",
+ "log",
+ "rustversion",
+ "windows",
+]
+
+[[package]]
 name = "generic-array"
 version = "0.14.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -1480,26 +1458,7 @@ dependencies = [
  "futures-core",
  "futures-sink",
  "futures-util",
- "http 0.2.11",
- "indexmap 2.1.0",
- "slab",
- "tokio",
- "tokio-util",
- "tracing",
-]
-
-[[package]]
-name = "h2"
-version = "0.4.4"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "816ec7294445779408f36fe57bc5b7fc1cf59664059096c65f905c1c61f58069"
-dependencies = [
- "bytes",
- "fnv",
- "futures-core",
- "futures-sink",
- "futures-util",
- "http 1.1.0",
+ "http",
  "indexmap 2.1.0",
  "slab",
  "tokio",
@@ -1574,49 +1533,21 @@ dependencies = [
 ]
 
 [[package]]
-name = "http"
-version = "1.1.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "21b9ddb458710bc376481b842f5da65cdf31522de232c1ca8146abce2a358258"
-dependencies = [
- "bytes",
- "fnv",
- "itoa",
-]
-
-[[package]]
 name = "http-body"
 version = "0.4.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "7ceab25649e9960c0311ea418d17bee82c0dcec1bd053b5f9a66e265a693bed2"
 dependencies = [
  "bytes",
- "http 0.2.11",
+ "http",
  "pin-project-lite",
 ]
 
 [[package]]
-name = "http-body"
-version = "1.0.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1cac85db508abc24a2e48553ba12a996e87244a0395ce011e62b37158745d643"
-dependencies = [
- "bytes",
- "http 1.1.0",
-]
-
-[[package]]
-name = "http-body-util"
-version = "0.1.1"
+name = "http-range-header"
+version = "0.3.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0475f8b2ac86659c21b64320d5d653f9efe42acd2a4e560073ec61a155a34f1d"
-dependencies = [
- "bytes",
- "futures-core",
- "http 1.1.0",
- "http-body 1.0.0",
- "pin-project-lite",
-]
+checksum = "add0ab9360ddbd88cfeb3bd9574a1d85cfdfa14db10b3e21d3700dbc4328758f"
 
 [[package]]
 name = "httparse"
@@ -1646,9 +1577,9 @@ dependencies = [
  "futures-channel",
  "futures-core",
  "futures-util",
- "h2 0.3.26",
- "http 0.2.11",
- "http-body 0.4.6",
+ "h2",
+ "http",
+ "http-body",
  "httparse",
  "httpdate",
  "itoa",
@@ -1661,34 +1592,14 @@ dependencies = [
 ]
 
 [[package]]
-name = "hyper"
-version = "1.3.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fe575dd17d0862a9a33781c8c4696a55c320909004a67a00fb286ba8b1bc496d"
-dependencies = [
- "bytes",
- "futures-channel",
- "futures-util",
- "h2 0.4.4",
- "http 1.1.0",
- "http-body 1.0.0",
- "httparse",
- "httpdate",
- "itoa",
- "pin-project-lite",
- "smallvec",
- "tokio",
-]
-
-[[package]]
 name = "hyper-rustls"
 version = "0.24.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "ec3efd23720e2049821a693cbc7e65ea87c72f1c58ff2f9522ff332b1491e590"
 dependencies = [
  "futures-util",
- "http 0.2.11",
- "hyper 0.14.28",
+ "http",
+ "hyper",
  "rustls 0.21.12",
  "rustls-native-certs 0.6.3",
  "tokio",
@@ -1701,29 +1612,13 @@ version = "0.4.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "bbb958482e8c7be4bc3cf272a766a2b0bf1a6755e7a6ae777f017a31d11b13b1"
 dependencies = [
- "hyper 0.14.28",
+ "hyper",
  "pin-project-lite",
  "tokio",
  "tokio-io-timeout",
 ]
 
 [[package]]
-name = "hyper-util"
-version = "0.1.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ca38ef113da30126bbff9cd1705f9273e15d45498615d138b0c20279ac7a76aa"
-dependencies = [
- "bytes",
- "futures-util",
- "http 1.1.0",
- "http-body 1.0.0",
- "hyper 1.3.1",
- "pin-project-lite",
- "socket2",
- "tokio",
-]
-
-[[package]]
 name = "iana-time-zone"
 version = "0.1.60"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -1734,7 +1629,7 @@ dependencies = [
  "iana-time-zone-haiku",
  "js-sys",
  "wasm-bindgen",
- "windows-core",
+ "windows-core 0.52.0",
 ]
 
 [[package]]
@@ -1809,6 +1704,20 @@ dependencies = [
 ]
 
 [[package]]
+name = "indicatif"
+version = "0.17.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "763a5a8f45087d6bcea4222e7b72c291a054edf80e4ef6efd2a4979878c7bea3"
+dependencies = [
+ "console",
+ "instant",
+ "number_prefix",
+ "portable-atomic",
+ "unicode-width",
+ "vt100",
+]
+
+[[package]]
 name = "instant"
 version = "0.1.12"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -2013,6 +1922,19 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "b5e6163cb8c49088c2c36f57875e58ccd8c87c7427f7fbd50ea6710b2f3f2e8f"
 
 [[package]]
+name = "loom"
+version = "0.7.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "419e0dc8046cb947daa77eb95ae174acfbddb7673b4151f56d1eed8e93fbfaca"
+dependencies = [
+ "cfg-if",
+ "generator",
+ "scoped-tls",
+ "tracing",
+ "tracing-subscriber",
+]
+
+[[package]]
 name = "lru"
 version = "0.12.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -2282,6 +2204,12 @@ dependencies = [
 ]
 
 [[package]]
+name = "number_prefix"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "830b246a0e5f20af87141b25c173cd1b609bd7779a4617d6ec582abaf90870f3"
+
+[[package]]
 name = "object"
 version = "0.32.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -2302,7 +2230,7 @@ dependencies = [
  "chrono",
  "futures",
  "humantime",
- "hyper 0.14.28",
+ "hyper",
  "itertools 0.12.0",
  "md-5",
  "parking_lot 0.12.2",
@@ -2341,13 +2269,12 @@ checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf"
 
 [[package]]
 name = "opentelemetry"
-version = "0.21.0"
+version = "0.22.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1e32339a5dc40459130b3bd269e9892439f55b33e772d2a9d402a789baaf4e8a"
+checksum = "900d57987be3f2aeb70d385fff9b27fb74c5723cc9a52d904d4f9c807a0667bf"
 dependencies = [
  "futures-core",
  "futures-sink",
- "indexmap 2.1.0",
  "js-sys",
  "once_cell",
  "pin-project-lite",
@@ -2356,50 +2283,59 @@ dependencies = [
 ]
 
 [[package]]
+name = "opentelemetry-http"
+version = "0.11.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7690dc77bf776713848c4faa6501157469017eaf332baccd4eb1cea928743d94"
+dependencies = [
+ "async-trait",
+ "bytes",
+ "http",
+ "opentelemetry",
+]
+
+[[package]]
 name = "opentelemetry-otlp"
-version = "0.14.0"
+version = "0.15.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f24cda83b20ed2433c68241f918d0f6fdec8b1d43b7a9590ab4420c5095ca930"
+checksum = "1a016b8d9495c639af2145ac22387dcb88e44118e45320d9238fbf4e7889abcb"
 dependencies = [
  "async-trait",
  "futures-core",
- "http 0.2.11",
+ "http",
  "opentelemetry",
  "opentelemetry-proto",
  "opentelemetry-semantic-conventions",
  "opentelemetry_sdk",
- "prost 0.11.9",
+ "prost",
  "thiserror",
  "tokio",
- "tonic 0.9.2",
+ "tonic",
 ]
 
 [[package]]
 name = "opentelemetry-proto"
-version = "0.4.0"
+version = "0.5.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a2e155ce5cc812ea3d1dffbd1539aed653de4bf4882d60e6e04dcf0901d674e1"
+checksum = "3a8fddc9b68f5b80dae9d6f510b88e02396f006ad48cac349411fbecc80caae4"
 dependencies = [
  "opentelemetry",
  "opentelemetry_sdk",
- "prost 0.11.9",
- "tonic 0.9.2",
+ "prost",
+ "tonic",
 ]
 
 [[package]]
 name = "opentelemetry-semantic-conventions"
-version = "0.13.0"
+version = "0.14.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f5774f1ef1f982ef2a447f6ee04ec383981a3ab99c8e77a1a7b30182e65bbc84"
-dependencies = [
- "opentelemetry",
-]
+checksum = "f9ab5bd6c42fb9349dcf28af2ba9a0667f697f9bdcca045d39f2cec5543e2910"
 
 [[package]]
 name = "opentelemetry_sdk"
-version = "0.21.2"
+version = "0.22.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2f16aec8a98a457a52664d69e0091bac3a0abd18ead9b641cb00202ba4e0efe4"
+checksum = "9e90c7113be649e31e9a0f8b5ee24ed7a16923b322c3c5ab6367469c049d6b7e"
 dependencies = [
  "async-trait",
  "crossbeam-channel",
@@ -2625,6 +2561,12 @@ dependencies = [
 ]
 
 [[package]]
+name = "portable-atomic"
+version = "1.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7170ef9988bc169ba16dd36a7fa041e5c4cbeb6a35b76d4c03daded371eae7c0"
+
+[[package]]
 name = "powerfmt"
 version = "0.2.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -2687,22 +2629,12 @@ dependencies = [
 
 [[package]]
 name = "prost"
-version = "0.11.9"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0b82eaa1d779e9a4bc1c3217db8ffbeabaae1dca241bf70183242128d48681cd"
-dependencies = [
- "bytes",
- "prost-derive 0.11.9",
-]
-
-[[package]]
-name = "prost"
 version = "0.12.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "146c289cda302b98a28d40c8b3b90498d6e526dd24ac2ecea73e4e491685b94a"
 dependencies = [
  "bytes",
- "prost-derive 0.12.3",
+ "prost-derive",
 ]
 
 [[package]]
@@ -2719,7 +2651,7 @@ dependencies = [
  "once_cell",
  "petgraph",
  "prettyplease",
- "prost 0.12.3",
+ "prost",
  "prost-types",
  "pulldown-cmark",
  "pulldown-cmark-to-cmark",
@@ -2731,19 +2663,6 @@ dependencies = [
 
 [[package]]
 name = "prost-derive"
-version = "0.11.9"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e5d2d8d10f3c6ded6da8b05b5fb3b8a5082514344d56c9f871412d29b4e075b4"
-dependencies = [
- "anyhow",
- "itertools 0.10.5",
- "proc-macro2",
- "quote",
- "syn 1.0.109",
-]
-
-[[package]]
-name = "prost-derive"
 version = "0.12.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "efb6c9a1dd1def8e2124d17e83a20af56f1570d6c2d2bd9e266ccb768df3840e"
@@ -2761,7 +2680,7 @@ version = "0.12.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "193898f59edcf43c26227dcd4c8427f00d99d61e95dcde58dabd49fa291d470e"
 dependencies = [
- "prost 0.12.3",
+ "prost",
 ]
 
 [[package]]
@@ -2772,7 +2691,7 @@ checksum = "4d8ef9c3f0f1dab910d2b7e2c24a8e4322e122eba6d7a1921eeebcebbc046c40"
 dependencies = [
  "chrono",
  "inventory",
- "prost 0.12.3",
+ "prost",
  "serde",
  "serde_derive",
  "serde_json",
@@ -2786,7 +2705,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "5b31cae9a54ca84fee1504740a82eebf2479532905e106f63ca0c3bc8d780321"
 dependencies = [
  "heck",
- "prost 0.12.3",
+ "prost",
  "prost-build",
  "prost-types",
  "quote",
@@ -2799,7 +2718,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "435be4a8704091b4c5fb1d79799de7f2dbff53af05edf29385237f8cf7ab37ee"
 dependencies = [
  "chrono",
- "prost 0.12.3",
+ "prost",
  "prost-build",
  "prost-types",
  "prost-wkt",
@@ -3032,10 +2951,10 @@ dependencies = [
  "encoding_rs",
  "futures-core",
  "futures-util",
- "h2 0.3.26",
- "http 0.2.11",
- "http-body 0.4.6",
- "hyper 0.14.28",
+ "h2",
+ "http",
+ "http-body",
+ "hyper",
  "hyper-rustls",
  "ipnet",
  "js-sys",
@@ -3337,6 +3256,12 @@ dependencies = [
 ]
 
 [[package]]
+name = "scoped-tls"
+version = "1.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e1cf6437eb19a8f4a6cc0f7dca544973b0b78843adbfeb3683d1a94a0024a294"
+
+[[package]]
 name = "scopeguard"
 version = "1.2.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -3413,16 +3338,6 @@ dependencies = [
 ]
 
 [[package]]
-name = "serde_path_to_error"
-version = "0.1.16"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "af99884400da37c88f5e9146b7f1fd0fbcae8f6eec4e9da38b67d05486f814a6"
-dependencies = [
- "itoa",
- "serde",
-]
-
-[[package]]
 name = "serde_qs"
 version = "0.12.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -3797,6 +3712,35 @@ dependencies = [
 ]
 
 [[package]]
+name = "threadpool"
+version = "1.8.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d050e60b33d41c19108b32cea32164033a9013fe3b46cbd4457559bfbf77afaa"
+dependencies = [
+ "num_cpus",
+]
+
+[[package]]
+name = "tikv-jemalloc-sys"
+version = "0.5.4+5.3.0-patched"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9402443cb8fd499b6f327e40565234ff34dbda27460c5b47db0db77443dd85d1"
+dependencies = [
+ "cc",
+ "libc",
+]
+
+[[package]]
+name = "tikv-jemallocator"
+version = "0.5.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "965fe0c26be5c56c94e38ba547249074803efd52adfb66de62107d95aab3eaca"
+dependencies = [
+ "libc",
+ "tikv-jemalloc-sys",
+]
+
+[[package]]
 name = "time"
 version = "0.3.34"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -3882,11 +3826,10 @@ dependencies = [
 
 [[package]]
 name = "tokio-listener"
-version = "0.3.2"
+version = "0.4.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "96367e127b4cf47b92592a5154a563435fe28fe3fccf25917d4a34ee59c87303"
+checksum = "4134661e12ec11c6276be73544a43144a357b08dfab5c41fd226e15b5bc9a6b2"
 dependencies = [
- "axum 0.7.4",
  "document-features",
  "futures-core",
  "futures-util",
@@ -3895,7 +3838,7 @@ dependencies = [
  "socket2",
  "tokio",
  "tokio-util",
- "tonic 0.11.0",
+ "tonic",
  "tracing",
 ]
 
@@ -4032,51 +3975,23 @@ dependencies = [
 
 [[package]]
 name = "tonic"
-version = "0.9.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3082666a3a6433f7f511c7192923fa1fe07c69332d3c6a2e6bb040b569199d5a"
-dependencies = [
- "async-trait",
- "axum 0.6.20",
- "base64",
- "bytes",
- "futures-core",
- "futures-util",
- "h2 0.3.26",
- "http 0.2.11",
- "http-body 0.4.6",
- "hyper 0.14.28",
- "hyper-timeout",
- "percent-encoding",
- "pin-project",
- "prost 0.11.9",
- "tokio",
- "tokio-stream",
- "tower",
- "tower-layer",
- "tower-service",
- "tracing",
-]
-
-[[package]]
-name = "tonic"
 version = "0.11.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "76c4eb7a4e9ef9d4763600161f12f5070b92a578e1b634db88a6887844c91a13"
 dependencies = [
  "async-stream",
  "async-trait",
- "axum 0.6.20",
+ "axum",
  "base64",
  "bytes",
- "h2 0.3.26",
- "http 0.2.11",
- "http-body 0.4.6",
- "hyper 0.14.28",
+ "h2",
+ "http",
+ "http-body",
+ "hyper",
  "hyper-timeout",
  "percent-encoding",
  "pin-project",
- "prost 0.12.3",
+ "prost",
  "rustls-native-certs 0.7.0",
  "rustls-pemfile 2.1.0",
  "rustls-pki-types",
@@ -4108,11 +4023,11 @@ version = "0.11.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "548c227bd5c0fae5925812c4ec6c66ffcfced23ea370cb823f4d18f0fc1cb6a7"
 dependencies = [
- "prost 0.12.3",
+ "prost",
  "prost-types",
  "tokio",
  "tokio-stream",
- "tonic 0.11.0",
+ "tonic",
 ]
 
 [[package]]
@@ -4136,6 +4051,25 @@ dependencies = [
 ]
 
 [[package]]
+name = "tower-http"
+version = "0.4.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "61c5bb1d698276a2443e5ecfabc1008bf15a36c12e6a7176e7bf089ea9131140"
+dependencies = [
+ "bitflags 2.4.2",
+ "bytes",
+ "futures-core",
+ "futures-util",
+ "http",
+ "http-body",
+ "http-range-header",
+ "pin-project-lite",
+ "tower-layer",
+ "tower-service",
+ "tracing",
+]
+
+[[package]]
 name = "tower-layer"
 version = "0.3.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -4191,6 +4125,18 @@ dependencies = [
 ]
 
 [[package]]
+name = "tracing-indicatif"
+version = "0.3.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "069580424efe11d97c3fef4197fa98c004fa26672cc71ad8770d224e23b1951d"
+dependencies = [
+ "indicatif",
+ "tracing",
+ "tracing-core",
+ "tracing-subscriber",
+]
+
+[[package]]
 name = "tracing-log"
 version = "0.2.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -4203,9 +4149,9 @@ dependencies = [
 
 [[package]]
 name = "tracing-opentelemetry"
-version = "0.22.0"
+version = "0.23.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c67ac25c5407e7b961fafc6f7e9aa5958fd297aada2d20fa2ae1737357e55596"
+checksum = "a9be14ba1bbe4ab79e9229f7f89fab8d120b865859f10527f31c033e599d2284"
 dependencies = [
  "js-sys",
  "once_cell",
@@ -4220,16 +4166,6 @@ dependencies = [
 ]
 
 [[package]]
-name = "tracing-serde"
-version = "0.1.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bc6b213177105856957181934e4920de57730fc69bf42c37ee5bb664d406d9e1"
-dependencies = [
- "serde",
- "tracing-core",
-]
-
-[[package]]
 name = "tracing-subscriber"
 version = "0.3.18"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -4239,15 +4175,43 @@ dependencies = [
  "nu-ansi-term",
  "once_cell",
  "regex",
- "serde",
- "serde_json",
  "sharded-slab",
  "smallvec",
  "thread_local",
  "tracing",
  "tracing-core",
  "tracing-log",
- "tracing-serde",
+]
+
+[[package]]
+name = "tracing-tracy"
+version = "0.11.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6024d04f84a69fd0d1dc1eee3a2b070bd246530a0582f9982ae487cb6c703614"
+dependencies = [
+ "tracing-core",
+ "tracing-subscriber",
+ "tracy-client",
+]
+
+[[package]]
+name = "tracy-client"
+version = "0.17.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "59fb931a64ff88984f86d3e9bcd1ae8843aa7fe44dd0f8097527bc172351741d"
+dependencies = [
+ "loom",
+ "once_cell",
+ "tracy-client-sys",
+]
+
+[[package]]
+name = "tracy-client-sys"
+version = "0.22.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9d104d610dfa9dd154535102cc9c6164ae1fa37842bc2d9e83f9ac82b0ae0882"
+dependencies = [
+ "cc",
 ]
 
 [[package]]
@@ -4263,18 +4227,18 @@ dependencies = [
  "bytes",
  "clap",
  "itertools 0.12.0",
- "prost 0.12.3",
+ "prost",
  "prost-build",
  "rstest",
  "thiserror",
  "tokio",
  "tokio-listener",
- "tonic 0.11.0",
+ "tonic",
  "tonic-build",
  "tonic-reflection",
  "tracing",
- "tracing-subscriber",
  "tvix-castore",
+ "tvix-tracing",
  "url",
 ]
 
@@ -4282,6 +4246,7 @@ dependencies = [
 name = "tvix-castore"
 version = "0.1.0"
 dependencies = [
+ "async-compression",
  "async-process",
  "async-stream",
  "async-tempfile",
@@ -4301,7 +4266,7 @@ dependencies = [
  "parking_lot 0.12.2",
  "petgraph",
  "pin-project-lite",
- "prost 0.12.3",
+ "prost",
  "prost-build",
  "rstest",
  "rstest_reuse",
@@ -4311,16 +4276,19 @@ dependencies = [
  "sled",
  "tempfile",
  "thiserror",
+ "threadpool",
  "tokio",
  "tokio-retry",
  "tokio-stream",
  "tokio-tar",
  "tokio-util",
- "tonic 0.11.0",
+ "tonic",
  "tonic-build",
  "tonic-reflection",
  "tower",
  "tracing",
+ "tracing-indicatif",
+ "tvix-tracing",
  "url",
  "vhost",
  "vhost-user-backend",
@@ -4341,16 +4309,19 @@ dependencies = [
  "clap",
  "dirs",
  "nix-compat",
+ "rnix",
  "rustyline",
  "thiserror",
+ "tikv-jemallocator",
  "tokio",
  "tracing",
- "tracing-subscriber",
+ "tracing-indicatif",
  "tvix-build",
  "tvix-castore",
  "tvix-eval",
  "tvix-glue",
  "tvix-store",
+ "tvix-tracing",
  "wu-manber",
 ]
 
@@ -4387,9 +4358,9 @@ dependencies = [
  "tabwriter",
  "tempfile",
  "test-strategy",
+ "tikv-jemallocator",
  "toml",
  "tvix-eval-builtin-macros",
- "xml-rs",
 ]
 
 [[package]]
@@ -4428,14 +4399,17 @@ dependencies = [
  "sha2",
  "tempfile",
  "thiserror",
+ "tikv-jemallocator",
  "tokio",
  "tokio-tar",
  "tokio-util",
  "tracing",
+ "tracing-indicatif",
  "tvix-build",
  "tvix-castore",
  "tvix-eval",
  "tvix-store",
+ "tvix-tracing",
  "url",
  "walkdir",
  "wu-manber",
@@ -4469,12 +4443,9 @@ dependencies = [
  "lazy_static",
  "lru",
  "nix-compat",
- "opentelemetry",
- "opentelemetry-otlp",
- "opentelemetry_sdk",
  "parking_lot 0.12.2",
  "pin-project-lite",
- "prost 0.12.3",
+ "prost",
  "prost-build",
  "reqwest",
  "rstest",
@@ -4492,19 +4463,41 @@ dependencies = [
  "tokio-retry",
  "tokio-stream",
  "tokio-util",
- "tonic 0.11.0",
+ "tonic",
  "tonic-build",
  "tonic-reflection",
  "tower",
+ "tower-http",
  "tracing",
- "tracing-opentelemetry",
- "tracing-subscriber",
+ "tracing-indicatif",
  "tvix-castore",
+ "tvix-tracing",
  "url",
  "walkdir",
 ]
 
 [[package]]
+name = "tvix-tracing"
+version = "0.1.0"
+dependencies = [
+ "http",
+ "indicatif",
+ "lazy_static",
+ "opentelemetry",
+ "opentelemetry-http",
+ "opentelemetry-otlp",
+ "opentelemetry_sdk",
+ "thiserror",
+ "tokio",
+ "tonic",
+ "tracing",
+ "tracing-indicatif",
+ "tracing-opentelemetry",
+ "tracing-subscriber",
+ "tracing-tracy",
+]
+
+[[package]]
 name = "typenum"
 version = "1.17.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -4711,6 +4704,39 @@ dependencies = [
 ]
 
 [[package]]
+name = "vt100"
+version = "0.15.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "84cd863bf0db7e392ba3bd04994be3473491b31e66340672af5d11943c6274de"
+dependencies = [
+ "itoa",
+ "log",
+ "unicode-width",
+ "vte",
+]
+
+[[package]]
+name = "vte"
+version = "0.11.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f5022b5fbf9407086c180e9557be968742d839e68346af7792b8592489732197"
+dependencies = [
+ "arrayvec",
+ "utf8parse",
+ "vte_generate_state_changes",
+]
+
+[[package]]
+name = "vte_generate_state_changes"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d257817081c7dffcdbab24b9e62d2def62e2ff7d00b1c20062551e6cccc145ff"
+dependencies = [
+ "proc-macro2",
+ "quote",
+]
+
+[[package]]
 name = "wait-timeout"
 version = "0.2.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -4835,9 +4861,9 @@ dependencies = [
 
 [[package]]
 name = "web-time"
-version = "0.2.4"
+version = "1.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "aa30049b1c872b72c89866d458eae9f20380ab280ffd1b1e18df2d3e2d98cfe0"
+checksum = "5a6580f308b1fad9207618087a65c04e7a10bc77e02c8e84e9b00dd4b12fa0bb"
 dependencies = [
  "js-sys",
  "wasm-bindgen",
@@ -4900,12 +4926,41 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
 
 [[package]]
+name = "windows"
+version = "0.54.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9252e5725dbed82865af151df558e754e4a3c2c30818359eb17465f1346a1b49"
+dependencies = [
+ "windows-core 0.54.0",
+ "windows-targets 0.52.5",
+]
+
+[[package]]
 name = "windows-core"
 version = "0.52.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "33ab640c8d7e35bf8ba19b884ba838ceb4fba93a4e8c65a9059d08afcfc683d9"
 dependencies = [
- "windows-targets 0.52.0",
+ "windows-targets 0.52.5",
+]
+
+[[package]]
+name = "windows-core"
+version = "0.54.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "12661b9c89351d684a50a8a643ce5f608e20243b9fb84687800163429f161d65"
+dependencies = [
+ "windows-result",
+ "windows-targets 0.52.5",
+]
+
+[[package]]
+name = "windows-result"
+version = "0.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5e383302e8ec8515204254685643de10811af0ed97ea37210dc26fb0032647f8"
+dependencies = [
+ "windows-targets 0.52.5",
 ]
 
 [[package]]
@@ -4923,7 +4978,7 @@ version = "0.52.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d"
 dependencies = [
- "windows-targets 0.52.0",
+ "windows-targets 0.52.5",
 ]
 
 [[package]]
@@ -4943,17 +4998,18 @@ dependencies = [
 
 [[package]]
 name = "windows-targets"
-version = "0.52.0"
+version = "0.52.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8a18201040b24831fbb9e4eb208f8892e1f50a37feb53cc7ff887feb8f50e7cd"
+checksum = "6f0713a46559409d202e70e28227288446bf7841d3211583a4b53e3f6d96e7eb"
 dependencies = [
- "windows_aarch64_gnullvm 0.52.0",
- "windows_aarch64_msvc 0.52.0",
- "windows_i686_gnu 0.52.0",
- "windows_i686_msvc 0.52.0",
- "windows_x86_64_gnu 0.52.0",
- "windows_x86_64_gnullvm 0.52.0",
- "windows_x86_64_msvc 0.52.0",
+ "windows_aarch64_gnullvm 0.52.5",
+ "windows_aarch64_msvc 0.52.5",
+ "windows_i686_gnu 0.52.5",
+ "windows_i686_gnullvm",
+ "windows_i686_msvc 0.52.5",
+ "windows_x86_64_gnu 0.52.5",
+ "windows_x86_64_gnullvm 0.52.5",
+ "windows_x86_64_msvc 0.52.5",
 ]
 
 [[package]]
@@ -4964,9 +5020,9 @@ checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8"
 
 [[package]]
 name = "windows_aarch64_gnullvm"
-version = "0.52.0"
+version = "0.52.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cb7764e35d4db8a7921e09562a0304bf2f93e0a51bfccee0bd0bb0b666b015ea"
+checksum = "7088eed71e8b8dda258ecc8bac5fb1153c5cffaf2578fc8ff5d61e23578d3263"
 
 [[package]]
 name = "windows_aarch64_msvc"
@@ -4976,9 +5032,9 @@ checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc"
 
 [[package]]
 name = "windows_aarch64_msvc"
-version = "0.52.0"
+version = "0.52.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bbaa0368d4f1d2aaefc55b6fcfee13f41544ddf36801e793edbbfd7d7df075ef"
+checksum = "9985fd1504e250c615ca5f281c3f7a6da76213ebd5ccc9561496568a2752afb6"
 
 [[package]]
 name = "windows_i686_gnu"
@@ -4988,9 +5044,15 @@ checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e"
 
 [[package]]
 name = "windows_i686_gnu"
-version = "0.52.0"
+version = "0.52.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a28637cb1fa3560a16915793afb20081aba2c92ee8af57b4d5f28e4b3e7df313"
+checksum = "88ba073cf16d5372720ec942a8ccbf61626074c6d4dd2e745299726ce8b89670"
+
+[[package]]
+name = "windows_i686_gnullvm"
+version = "0.52.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "87f4261229030a858f36b459e748ae97545d6f1ec60e5e0d6a3d32e0dc232ee9"
 
 [[package]]
 name = "windows_i686_msvc"
@@ -5000,9 +5062,9 @@ checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406"
 
 [[package]]
 name = "windows_i686_msvc"
-version = "0.52.0"
+version = "0.52.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ffe5e8e31046ce6230cc7215707b816e339ff4d4d67c65dffa206fd0f7aa7b9a"
+checksum = "db3c2bf3d13d5b658be73463284eaf12830ac9a26a90c717b7f771dfe97487bf"
 
 [[package]]
 name = "windows_x86_64_gnu"
@@ -5012,9 +5074,9 @@ checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e"
 
 [[package]]
 name = "windows_x86_64_gnu"
-version = "0.52.0"
+version = "0.52.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3d6fa32db2bc4a2f5abeacf2b69f7992cd09dca97498da74a151a3132c26befd"
+checksum = "4e4246f76bdeff09eb48875a0fd3e2af6aada79d409d33011886d3e1581517d9"
 
 [[package]]
 name = "windows_x86_64_gnullvm"
@@ -5024,9 +5086,9 @@ checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc"
 
 [[package]]
 name = "windows_x86_64_gnullvm"
-version = "0.52.0"
+version = "0.52.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1a657e1e9d3f514745a572a6846d3c7aa7dbe1658c056ed9c3344c4109a6949e"
+checksum = "852298e482cd67c356ddd9570386e2862b5673c85bd5f88df9ab6802b334c596"
 
 [[package]]
 name = "windows_x86_64_msvc"
@@ -5036,9 +5098,9 @@ checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538"
 
 [[package]]
 name = "windows_x86_64_msvc"
-version = "0.52.0"
+version = "0.52.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "dff9641d1cd4be8d1a070daf9e3773c5f67e78b4d9d42263020c057706765c04"
+checksum = "bec47e5bfd1bff0eeaf6d8b485cc1074891a197ab4225d504cb7a1ab88b02bf0"
 
 [[package]]
 name = "winreg"
@@ -5067,12 +5129,6 @@ dependencies = [
 ]
 
 [[package]]
-name = "xml-rs"
-version = "0.8.19"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0fcb9cbac069e033553e8bb871be2fbdffcab578eb25bd0f7c508cedc6dcd75a"
-
-[[package]]
 name = "xz2"
 version = "0.1.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
diff --git a/tvix/Cargo.nix b/tvix/Cargo.nix
index 32f0c8c8e9..008080c461 100644
--- a/tvix/Cargo.nix
+++ b/tvix/Cargo.nix
@@ -123,6 +123,16 @@ rec {
       # File a bug if you depend on any for non-debug work!
       debug = internal.debugCrate { inherit packageId; };
     };
+    "tvix-tracing" = rec {
+      packageId = "tvix-tracing";
+      build = internal.buildRustCrateWithFeatures {
+        packageId = "tvix-tracing";
+      };
+
+      # Debug support which might change between releases.
+      # File a bug if you depend on any for non-debug work!
+      debug = internal.debugCrate { inherit packageId; };
+    };
   };
 
   # A derivation that joins the outputs of all workspace members together.
@@ -994,7 +1004,7 @@ rec {
         ];
 
       };
-      "axum 0.6.20" = rec {
+      "axum" = rec {
         crateName = "axum";
         version = "0.6.20";
         edition = "2021";
@@ -1006,7 +1016,7 @@ rec {
           }
           {
             name = "axum-core";
-            packageId = "axum-core 0.3.4";
+            packageId = "axum-core";
           }
           {
             name = "bitflags";
@@ -1024,15 +1034,15 @@ rec {
           }
           {
             name = "http";
-            packageId = "http 0.2.11";
+            packageId = "http";
           }
           {
             name = "http-body";
-            packageId = "http-body 0.4.6";
+            packageId = "http-body";
           }
           {
             name = "hyper";
-            packageId = "hyper 0.14.28";
+            packageId = "hyper";
             features = [ "stream" ];
           }
           {
@@ -1122,184 +1132,7 @@ rec {
           "ws" = [ "tokio" "dep:tokio-tungstenite" "dep:sha1" "dep:base64" ];
         };
       };
-      "axum 0.7.4" = rec {
-        crateName = "axum";
-        version = "0.7.4";
-        edition = "2021";
-        sha256 = "17kv7v8m981cqmfbv5m538fzxhw51l9bajv06kfddi7njarb8dhj";
-        dependencies = [
-          {
-            name = "async-trait";
-            packageId = "async-trait";
-          }
-          {
-            name = "axum-core";
-            packageId = "axum-core 0.4.3";
-          }
-          {
-            name = "bytes";
-            packageId = "bytes";
-          }
-          {
-            name = "futures-util";
-            packageId = "futures-util";
-            usesDefaultFeatures = false;
-            features = [ "alloc" ];
-          }
-          {
-            name = "http";
-            packageId = "http 1.1.0";
-          }
-          {
-            name = "http-body";
-            packageId = "http-body 1.0.0";
-          }
-          {
-            name = "http-body-util";
-            packageId = "http-body-util";
-          }
-          {
-            name = "hyper";
-            packageId = "hyper 1.3.1";
-            optional = true;
-          }
-          {
-            name = "hyper-util";
-            packageId = "hyper-util";
-            optional = true;
-            features = [ "tokio" "server" "server-auto" ];
-          }
-          {
-            name = "itoa";
-            packageId = "itoa";
-          }
-          {
-            name = "matchit";
-            packageId = "matchit";
-          }
-          {
-            name = "memchr";
-            packageId = "memchr";
-          }
-          {
-            name = "mime";
-            packageId = "mime";
-          }
-          {
-            name = "percent-encoding";
-            packageId = "percent-encoding";
-          }
-          {
-            name = "pin-project-lite";
-            packageId = "pin-project-lite";
-          }
-          {
-            name = "serde";
-            packageId = "serde";
-          }
-          {
-            name = "serde_json";
-            packageId = "serde_json";
-            optional = true;
-            features = [ "raw_value" ];
-          }
-          {
-            name = "serde_path_to_error";
-            packageId = "serde_path_to_error";
-            optional = true;
-          }
-          {
-            name = "serde_urlencoded";
-            packageId = "serde_urlencoded";
-            optional = true;
-          }
-          {
-            name = "sync_wrapper";
-            packageId = "sync_wrapper";
-          }
-          {
-            name = "tokio";
-            packageId = "tokio";
-            rename = "tokio";
-            optional = true;
-            features = [ "time" ];
-          }
-          {
-            name = "tower";
-            packageId = "tower";
-            usesDefaultFeatures = false;
-            features = [ "util" ];
-          }
-          {
-            name = "tower-layer";
-            packageId = "tower-layer";
-          }
-          {
-            name = "tower-service";
-            packageId = "tower-service";
-          }
-          {
-            name = "tracing";
-            packageId = "tracing";
-            optional = true;
-            usesDefaultFeatures = false;
-          }
-        ];
-        buildDependencies = [
-          {
-            name = "rustversion";
-            packageId = "rustversion";
-          }
-        ];
-        devDependencies = [
-          {
-            name = "rustversion";
-            packageId = "rustversion";
-          }
-          {
-            name = "serde";
-            packageId = "serde";
-            features = [ "derive" ];
-          }
-          {
-            name = "serde_json";
-            packageId = "serde_json";
-          }
-          {
-            name = "tokio";
-            packageId = "tokio";
-            rename = "tokio";
-            features = [ "macros" "rt" "rt-multi-thread" "net" "test-util" ];
-          }
-          {
-            name = "tower";
-            packageId = "tower";
-            rename = "tower";
-            features = [ "util" "timeout" "limit" "load-shed" "steer" "filter" ];
-          }
-          {
-            name = "tracing";
-            packageId = "tracing";
-          }
-        ];
-        features = {
-          "__private_docs" = [ "tower/full" "dep:tower-http" ];
-          "default" = [ "form" "http1" "json" "matched-path" "original-uri" "query" "tokio" "tower-log" "tracing" ];
-          "form" = [ "dep:serde_urlencoded" ];
-          "http1" = [ "dep:hyper" "hyper?/http1" ];
-          "http2" = [ "dep:hyper" "hyper?/http2" ];
-          "json" = [ "dep:serde_json" "dep:serde_path_to_error" ];
-          "macros" = [ "dep:axum-macros" ];
-          "multipart" = [ "dep:multer" ];
-          "query" = [ "dep:serde_urlencoded" ];
-          "tokio" = [ "dep:hyper-util" "dep:tokio" "tokio/net" "tokio/rt" "tower/make" "tokio/macros" ];
-          "tower-log" = [ "tower/log" ];
-          "tracing" = [ "dep:tracing" "axum-core/tracing" ];
-          "ws" = [ "dep:hyper" "tokio" "dep:tokio-tungstenite" "dep:sha1" "dep:base64" ];
-        };
-        resolvedDefaultFeatures = [ "default" "form" "http1" "json" "matched-path" "original-uri" "query" "tokio" "tower-log" "tracing" ];
-      };
-      "axum-core 0.3.4" = rec {
+      "axum-core" = rec {
         crateName = "axum-core";
         version = "0.3.4";
         edition = "2021";
@@ -1321,89 +1154,17 @@ rec {
           }
           {
             name = "http";
-            packageId = "http 0.2.11";
-          }
-          {
-            name = "http-body";
-            packageId = "http-body 0.4.6";
-          }
-          {
-            name = "mime";
-            packageId = "mime";
-          }
-          {
-            name = "tower-layer";
-            packageId = "tower-layer";
-          }
-          {
-            name = "tower-service";
-            packageId = "tower-service";
-          }
-        ];
-        buildDependencies = [
-          {
-            name = "rustversion";
-            packageId = "rustversion";
-          }
-        ];
-        devDependencies = [
-          {
-            name = "futures-util";
-            packageId = "futures-util";
-            usesDefaultFeatures = false;
-            features = [ "alloc" ];
-          }
-        ];
-        features = {
-          "__private_docs" = [ "dep:tower-http" ];
-          "tracing" = [ "dep:tracing" ];
-        };
-      };
-      "axum-core 0.4.3" = rec {
-        crateName = "axum-core";
-        version = "0.4.3";
-        edition = "2021";
-        sha256 = "1qx28wg4j6qdcdrisqwyaavlzc0zvbsrcwa99zf9456lfbyn6p51";
-        dependencies = [
-          {
-            name = "async-trait";
-            packageId = "async-trait";
-          }
-          {
-            name = "bytes";
-            packageId = "bytes";
-          }
-          {
-            name = "futures-util";
-            packageId = "futures-util";
-            usesDefaultFeatures = false;
-            features = [ "alloc" ];
-          }
-          {
-            name = "http";
-            packageId = "http 1.1.0";
+            packageId = "http";
           }
           {
             name = "http-body";
-            packageId = "http-body 1.0.0";
-          }
-          {
-            name = "http-body-util";
-            packageId = "http-body-util";
+            packageId = "http-body";
           }
           {
             name = "mime";
             packageId = "mime";
           }
           {
-            name = "pin-project-lite";
-            packageId = "pin-project-lite";
-          }
-          {
-            name = "sync_wrapper";
-            packageId = "sync_wrapper";
-          }
-          {
             name = "tower-layer";
             packageId = "tower-layer";
           }
@@ -1411,12 +1172,6 @@ rec {
             name = "tower-service";
             packageId = "tower-service";
           }
-          {
-            name = "tracing";
-            packageId = "tracing";
-            optional = true;
-            usesDefaultFeatures = false;
-          }
         ];
         buildDependencies = [
           {
@@ -1436,7 +1191,6 @@ rec {
           "__private_docs" = [ "dep:tower-http" ];
           "tracing" = [ "dep:tracing" ];
         };
-        resolvedDefaultFeatures = [ "tracing" ];
       };
       "backtrace" = rec {
         crateName = "backtrace";
@@ -1547,7 +1301,7 @@ rec {
           }
           {
             name = "http";
-            packageId = "http 0.2.11";
+            packageId = "http";
           }
           {
             name = "log";
@@ -1555,7 +1309,7 @@ rec {
           }
           {
             name = "prost";
-            packageId = "prost 0.12.3";
+            packageId = "prost";
           }
           {
             name = "prost-types";
@@ -1590,7 +1344,7 @@ rec {
           }
           {
             name = "tonic";
-            packageId = "tonic 0.11.0";
+            packageId = "tonic";
             features = [ "tls" "transport" ];
           }
           {
@@ -2076,7 +1830,7 @@ rec {
           }
           {
             name = "windows-targets";
-            packageId = "windows-targets 0.52.0";
+            packageId = "windows-targets 0.52.5";
             optional = true;
             target = { target, features }: (target."windows" or false);
           }
@@ -2382,6 +2136,47 @@ rec {
         };
         resolvedDefaultFeatures = [ "default" "std" ];
       };
+      "console" = rec {
+        crateName = "console";
+        version = "0.15.8";
+        edition = "2018";
+        sha256 = "1sz4nl9nz8pkmapqni6py7jxzi7nzqjxzb3ya4kxvmkb0zy867qf";
+        authors = [
+          "Armin Ronacher <armin.ronacher@active-4.com>"
+        ];
+        dependencies = [
+          {
+            name = "encode_unicode";
+            packageId = "encode_unicode";
+            target = { target, features }: (target."windows" or false);
+          }
+          {
+            name = "lazy_static";
+            packageId = "lazy_static";
+          }
+          {
+            name = "libc";
+            packageId = "libc";
+          }
+          {
+            name = "unicode-width";
+            packageId = "unicode-width";
+            optional = true;
+          }
+          {
+            name = "windows-sys";
+            packageId = "windows-sys 0.52.0";
+            target = { target, features }: (target."windows" or false);
+            features = [ "Win32_Foundation" "Win32_System_Console" "Win32_Storage_FileSystem" "Win32_UI_Input_KeyboardAndMouse" ];
+          }
+        ];
+        features = {
+          "default" = [ "unicode-width" "ansi-parsing" ];
+          "unicode-width" = [ "dep:unicode-width" ];
+          "windows-console-colors" = [ "ansi-parsing" ];
+        };
+        resolvedDefaultFeatures = [ "ansi-parsing" "unicode-width" ];
+      };
       "const-oid" = rec {
         crateName = "const-oid";
         version = "0.9.6";
@@ -3328,6 +3123,21 @@ rec {
         };
         resolvedDefaultFeatures = [ "default" "use_std" ];
       };
+      "encode_unicode" = rec {
+        crateName = "encode_unicode";
+        version = "0.3.6";
+        edition = "2015";
+        sha256 = "07w3vzrhxh9lpjgsg2y5bwzfar2aq35mdznvcp3zjl0ssj7d4mx3";
+        authors = [
+          "Torbjørn Birch Moltu <t.b.moltu@lyse.net>"
+        ];
+        features = {
+          "ascii" = [ "dep:ascii" ];
+          "clippy" = [ "dep:clippy" ];
+          "default" = [ "std" ];
+        };
+        resolvedDefaultFeatures = [ "default" "std" ];
+      };
       "encoding_rs" = rec {
         crateName = "encoding_rs";
         version = "0.8.33";
@@ -4334,7 +4144,7 @@ rec {
           }
           {
             name = "hyper";
-            packageId = "hyper 0.14.28";
+            packageId = "hyper";
             features = [ "client" "runtime" "http2" ];
           }
           {
@@ -4436,6 +4246,47 @@ rec {
         ];
         features = { };
       };
+      "generator" = rec {
+        crateName = "generator";
+        version = "0.8.1";
+        edition = "2021";
+        sha256 = "1yw3rxbfq5a3yzrg88pdln2lvi9014zg1mpq1q4x0cf27gai8q0q";
+        authors = [
+          "Xudong Huang <huangxu008@hotmail.com>"
+        ];
+        dependencies = [
+          {
+            name = "cfg-if";
+            packageId = "cfg-if";
+          }
+          {
+            name = "libc";
+            packageId = "libc";
+            target = { target, features }: (target."unix" or false);
+          }
+          {
+            name = "log";
+            packageId = "log";
+          }
+          {
+            name = "windows";
+            packageId = "windows";
+            target = { target, features }: (target."windows" or false);
+            features = [ "Win32_System_Memory" "Win32_System_Kernel" "Win32_Foundation" "Win32_System_SystemInformation" "Win32_System_Diagnostics_Debug" ];
+          }
+        ];
+        buildDependencies = [
+          {
+            name = "cc";
+            packageId = "cc";
+          }
+          {
+            name = "rustversion";
+            packageId = "rustversion";
+          }
+        ];
+
+      };
       "generic-array" = rec {
         crateName = "generic-array";
         version = "0.14.7";
@@ -4527,7 +4378,7 @@ rec {
         ];
 
       };
-      "h2 0.3.26" = rec {
+      "h2" = rec {
         crateName = "h2";
         version = "0.3.26";
         edition = "2018";
@@ -4562,79 +4413,7 @@ rec {
           }
           {
             name = "http";
-            packageId = "http 0.2.11";
-          }
-          {
-            name = "indexmap";
-            packageId = "indexmap 2.1.0";
-            features = [ "std" ];
-          }
-          {
-            name = "slab";
-            packageId = "slab";
-          }
-          {
-            name = "tokio";
-            packageId = "tokio";
-            features = [ "io-util" ];
-          }
-          {
-            name = "tokio-util";
-            packageId = "tokio-util";
-            features = [ "codec" "io" ];
-          }
-          {
-            name = "tracing";
-            packageId = "tracing";
-            usesDefaultFeatures = false;
-            features = [ "std" ];
-          }
-        ];
-        devDependencies = [
-          {
-            name = "tokio";
-            packageId = "tokio";
-            features = [ "rt-multi-thread" "macros" "sync" "net" ];
-          }
-        ];
-        features = { };
-      };
-      "h2 0.4.4" = rec {
-        crateName = "h2";
-        version = "0.4.4";
-        edition = "2021";
-        sha256 = "0sc0ymhiqp4hbz39d405cjbga77wnz2pprbgyc498xs58hlwfvl1";
-        authors = [
-          "Carl Lerche <me@carllerche.com>"
-          "Sean McArthur <sean@seanmonstar.com>"
-        ];
-        dependencies = [
-          {
-            name = "bytes";
-            packageId = "bytes";
-          }
-          {
-            name = "fnv";
-            packageId = "fnv";
-          }
-          {
-            name = "futures-core";
-            packageId = "futures-core";
-            usesDefaultFeatures = false;
-          }
-          {
-            name = "futures-sink";
-            packageId = "futures-sink";
-            usesDefaultFeatures = false;
-          }
-          {
-            name = "futures-util";
-            packageId = "futures-util";
-            usesDefaultFeatures = false;
-          }
-          {
-            name = "http";
-            packageId = "http 1.1.0";
+            packageId = "http";
           }
           {
             name = "indexmap";
@@ -4822,7 +4601,7 @@ rec {
         ];
 
       };
-      "http 0.2.11" = rec {
+      "http" = rec {
         crateName = "http";
         version = "0.2.11";
         edition = "2018";
@@ -4848,36 +4627,7 @@ rec {
         ];
 
       };
-      "http 1.1.0" = rec {
-        crateName = "http";
-        version = "1.1.0";
-        edition = "2018";
-        sha256 = "0n426lmcxas6h75c2cp25m933pswlrfjz10v91vc62vib2sdvf91";
-        authors = [
-          "Alex Crichton <alex@alexcrichton.com>"
-          "Carl Lerche <me@carllerche.com>"
-          "Sean McArthur <sean@seanmonstar.com>"
-        ];
-        dependencies = [
-          {
-            name = "bytes";
-            packageId = "bytes";
-          }
-          {
-            name = "fnv";
-            packageId = "fnv";
-          }
-          {
-            name = "itoa";
-            packageId = "itoa";
-          }
-        ];
-        features = {
-          "default" = [ "std" ];
-        };
-        resolvedDefaultFeatures = [ "default" "std" ];
-      };
-      "http-body 0.4.6" = rec {
+      "http-body" = rec {
         crateName = "http-body";
         version = "0.4.6";
         edition = "2018";
@@ -4894,7 +4644,7 @@ rec {
           }
           {
             name = "http";
-            packageId = "http 0.2.11";
+            packageId = "http";
           }
           {
             name = "pin-project-lite";
@@ -4903,61 +4653,12 @@ rec {
         ];
 
       };
-      "http-body 1.0.0" = rec {
-        crateName = "http-body";
-        version = "1.0.0";
-        edition = "2018";
-        sha256 = "0hyn8n3iadrbwq8y0p1rl1275s4nm49bllw5wji29g4aa3dqbb0w";
-        authors = [
-          "Carl Lerche <me@carllerche.com>"
-          "Lucio Franco <luciofranco14@gmail.com>"
-          "Sean McArthur <sean@seanmonstar.com>"
-        ];
-        dependencies = [
-          {
-            name = "bytes";
-            packageId = "bytes";
-          }
-          {
-            name = "http";
-            packageId = "http 1.1.0";
-          }
-        ];
-
-      };
-      "http-body-util" = rec {
-        crateName = "http-body-util";
-        version = "0.1.1";
+      "http-range-header" = rec {
+        crateName = "http-range-header";
+        version = "0.3.1";
         edition = "2018";
-        sha256 = "07agldas2qgcfc05ckiarlmf9vzragbda823nqhrqrc6mjrghx84";
-        authors = [
-          "Carl Lerche <me@carllerche.com>"
-          "Lucio Franco <luciofranco14@gmail.com>"
-          "Sean McArthur <sean@seanmonstar.com>"
-        ];
-        dependencies = [
-          {
-            name = "bytes";
-            packageId = "bytes";
-          }
-          {
-            name = "futures-core";
-            packageId = "futures-core";
-          }
-          {
-            name = "http";
-            packageId = "http 1.1.0";
-          }
-          {
-            name = "http-body";
-            packageId = "http-body 1.0.0";
-          }
-          {
-            name = "pin-project-lite";
-            packageId = "pin-project-lite";
-          }
-        ];
-
+        sha256 = "13vm511vq3bhschkw2xi9nhxzkw53m55gn9vxg7qigfxc29spl5d";
+        features = { };
       };
       "httparse" = rec {
         crateName = "httparse";
@@ -4992,7 +4693,7 @@ rec {
         ];
 
       };
-      "hyper 0.14.28" = rec {
+      "hyper" = rec {
         crateName = "hyper";
         version = "0.14.28";
         edition = "2018";
@@ -5021,16 +4722,16 @@ rec {
           }
           {
             name = "h2";
-            packageId = "h2 0.3.26";
+            packageId = "h2";
             optional = true;
           }
           {
             name = "http";
-            packageId = "http 0.2.11";
+            packageId = "http";
           }
           {
             name = "http-body";
-            packageId = "http-body 0.4.6";
+            packageId = "http-body";
           }
           {
             name = "httparse";
@@ -5099,104 +4800,6 @@ rec {
         };
         resolvedDefaultFeatures = [ "client" "default" "full" "h2" "http1" "http2" "runtime" "server" "socket2" "stream" "tcp" ];
       };
-      "hyper 1.3.1" = rec {
-        crateName = "hyper";
-        version = "1.3.1";
-        edition = "2021";
-        sha256 = "0va9pjqshsr8zc07m9h4j2821hsmd9lw9j416yisjqh8gp8msmzy";
-        authors = [
-          "Sean McArthur <sean@seanmonstar.com>"
-        ];
-        dependencies = [
-          {
-            name = "bytes";
-            packageId = "bytes";
-          }
-          {
-            name = "futures-channel";
-            packageId = "futures-channel";
-            optional = true;
-          }
-          {
-            name = "futures-util";
-            packageId = "futures-util";
-            optional = true;
-            usesDefaultFeatures = false;
-          }
-          {
-            name = "h2";
-            packageId = "h2 0.4.4";
-            optional = true;
-          }
-          {
-            name = "http";
-            packageId = "http 1.1.0";
-          }
-          {
-            name = "http-body";
-            packageId = "http-body 1.0.0";
-          }
-          {
-            name = "httparse";
-            packageId = "httparse";
-            optional = true;
-          }
-          {
-            name = "httpdate";
-            packageId = "httpdate";
-            optional = true;
-          }
-          {
-            name = "itoa";
-            packageId = "itoa";
-            optional = true;
-          }
-          {
-            name = "pin-project-lite";
-            packageId = "pin-project-lite";
-            optional = true;
-          }
-          {
-            name = "smallvec";
-            packageId = "smallvec";
-            optional = true;
-            features = [ "const_generics" "const_new" ];
-          }
-          {
-            name = "tokio";
-            packageId = "tokio";
-            features = [ "sync" ];
-          }
-        ];
-        devDependencies = [
-          {
-            name = "futures-channel";
-            packageId = "futures-channel";
-            features = [ "sink" ];
-          }
-          {
-            name = "futures-util";
-            packageId = "futures-util";
-            usesDefaultFeatures = false;
-            features = [ "alloc" "sink" ];
-          }
-          {
-            name = "tokio";
-            packageId = "tokio";
-            features = [ "fs" "macros" "net" "io-std" "io-util" "rt" "rt-multi-thread" "sync" "time" "test-util" ];
-          }
-        ];
-        features = {
-          "client" = [ "dep:want" "dep:pin-project-lite" "dep:smallvec" ];
-          "ffi" = [ "dep:libc" "dep:http-body-util" "futures-util?/alloc" ];
-          "full" = [ "client" "http1" "http2" "server" ];
-          "http1" = [ "dep:futures-channel" "dep:futures-util" "dep:httparse" "dep:itoa" ];
-          "http2" = [ "dep:futures-channel" "dep:futures-util" "dep:h2" ];
-          "server" = [ "dep:httpdate" "dep:pin-project-lite" "dep:smallvec" ];
-          "tracing" = [ "dep:tracing" ];
-        };
-        resolvedDefaultFeatures = [ "default" "http1" "http2" "server" ];
-      };
       "hyper-rustls" = rec {
         crateName = "hyper-rustls";
         version = "0.24.2";
@@ -5210,11 +4813,11 @@ rec {
           }
           {
             name = "http";
-            packageId = "http 0.2.11";
+            packageId = "http";
           }
           {
             name = "hyper";
-            packageId = "hyper 0.14.28";
+            packageId = "hyper";
             usesDefaultFeatures = false;
             features = [ "client" ];
           }
@@ -5241,7 +4844,7 @@ rec {
         devDependencies = [
           {
             name = "hyper";
-            packageId = "hyper 0.14.28";
+            packageId = "hyper";
             features = [ "full" ];
           }
           {
@@ -5283,7 +4886,7 @@ rec {
         dependencies = [
           {
             name = "hyper";
-            packageId = "hyper 0.14.28";
+            packageId = "hyper";
             features = [ "client" ];
           }
           {
@@ -5302,7 +4905,7 @@ rec {
         devDependencies = [
           {
             name = "hyper";
-            packageId = "hyper 0.14.28";
+            packageId = "hyper";
             features = [ "client" "http1" "tcp" ];
           }
           {
@@ -5313,82 +4916,6 @@ rec {
         ];
 
       };
-      "hyper-util" = rec {
-        crateName = "hyper-util";
-        version = "0.1.3";
-        edition = "2021";
-        sha256 = "1akngan7j0n2n0wd25c6952mvqbkj9gp1lcwzyxjc0d37l8yyf6a";
-        authors = [
-          "Sean McArthur <sean@seanmonstar.com>"
-        ];
-        dependencies = [
-          {
-            name = "bytes";
-            packageId = "bytes";
-          }
-          {
-            name = "futures-util";
-            packageId = "futures-util";
-            usesDefaultFeatures = false;
-          }
-          {
-            name = "http";
-            packageId = "http 1.1.0";
-          }
-          {
-            name = "http-body";
-            packageId = "http-body 1.0.0";
-          }
-          {
-            name = "hyper";
-            packageId = "hyper 1.3.1";
-          }
-          {
-            name = "pin-project-lite";
-            packageId = "pin-project-lite";
-          }
-          {
-            name = "socket2";
-            packageId = "socket2";
-            optional = true;
-            features = [ "all" ];
-          }
-          {
-            name = "tokio";
-            packageId = "tokio";
-            optional = true;
-            features = [ "net" "rt" "time" ];
-          }
-        ];
-        devDependencies = [
-          {
-            name = "bytes";
-            packageId = "bytes";
-          }
-          {
-            name = "hyper";
-            packageId = "hyper 1.3.1";
-            features = [ "full" ];
-          }
-          {
-            name = "tokio";
-            packageId = "tokio";
-            features = [ "macros" "test-util" ];
-          }
-        ];
-        features = {
-          "client" = [ "hyper/client" "dep:tracing" "dep:futures-channel" "dep:tower" "dep:tower-service" ];
-          "client-legacy" = [ "client" ];
-          "full" = [ "client" "client-legacy" "server" "server-auto" "service" "http1" "http2" "tokio" ];
-          "http1" = [ "hyper/http1" ];
-          "http2" = [ "hyper/http2" ];
-          "server" = [ "hyper/server" ];
-          "server-auto" = [ "server" "http1" "http2" ];
-          "service" = [ "dep:tower" "dep:tower-service" ];
-          "tokio" = [ "dep:tokio" "dep:socket2" ];
-        };
-        resolvedDefaultFeatures = [ "default" "http1" "http2" "server" "server-auto" "tokio" ];
-      };
       "iana-time-zone" = rec {
         crateName = "iana-time-zone";
         version = "0.1.60";
@@ -5427,7 +4954,7 @@ rec {
           }
           {
             name = "windows-core";
-            packageId = "windows-core";
+            packageId = "windows-core 0.52.0";
             target = { target, features }: ("windows" == target."os" or null);
           }
         ];
@@ -5644,6 +5171,55 @@ rec {
         };
         resolvedDefaultFeatures = [ "default" "serde" "std" ];
       };
+      "indicatif" = rec {
+        crateName = "indicatif";
+        version = "0.17.8";
+        edition = "2021";
+        sha256 = "18xyqxw9i5x4sbpzckhfz3nm984iq9r7nbi2lk76nz888n7mlfkn";
+        dependencies = [
+          {
+            name = "console";
+            packageId = "console";
+            usesDefaultFeatures = false;
+            features = [ "ansi-parsing" ];
+          }
+          {
+            name = "instant";
+            packageId = "instant";
+            target = { target, features }: ("wasm32" == target."arch" or null);
+          }
+          {
+            name = "number_prefix";
+            packageId = "number_prefix";
+          }
+          {
+            name = "portable-atomic";
+            packageId = "portable-atomic";
+          }
+          {
+            name = "unicode-width";
+            packageId = "unicode-width";
+            optional = true;
+          }
+          {
+            name = "vt100";
+            packageId = "vt100";
+            optional = true;
+          }
+        ];
+        features = {
+          "default" = [ "unicode-width" "console/unicode-width" ];
+          "futures" = [ "dep:futures-core" ];
+          "improved_unicode" = [ "unicode-segmentation" "unicode-width" "console/unicode-width" ];
+          "in_memory" = [ "vt100" ];
+          "rayon" = [ "dep:rayon" ];
+          "tokio" = [ "dep:tokio" ];
+          "unicode-segmentation" = [ "dep:unicode-segmentation" ];
+          "unicode-width" = [ "dep:unicode-width" ];
+          "vt100" = [ "dep:vt100" ];
+        };
+        resolvedDefaultFeatures = [ "default" "in_memory" "unicode-width" "vt100" ];
+      };
       "instant" = rec {
         crateName = "instant";
         version = "0.1.12";
@@ -6220,6 +5796,48 @@ rec {
         };
         resolvedDefaultFeatures = [ "std" ];
       };
+      "loom" = rec {
+        crateName = "loom";
+        version = "0.7.2";
+        edition = "2018";
+        sha256 = "1jpszf9qxv8ydpsm2h9vcyvxvyxcfkhmmfbylzd4gfbc0k40v7j1";
+        authors = [
+          "Carl Lerche <me@carllerche.com>"
+        ];
+        dependencies = [
+          {
+            name = "cfg-if";
+            packageId = "cfg-if";
+          }
+          {
+            name = "generator";
+            packageId = "generator";
+          }
+          {
+            name = "scoped-tls";
+            packageId = "scoped-tls";
+          }
+          {
+            name = "tracing";
+            packageId = "tracing";
+            usesDefaultFeatures = false;
+            features = [ "std" ];
+          }
+          {
+            name = "tracing-subscriber";
+            packageId = "tracing-subscriber";
+            features = [ "env-filter" ];
+          }
+        ];
+        features = {
+          "checkpoint" = [ "serde" "serde_json" ];
+          "futures" = [ "pin-utils" ];
+          "pin-utils" = [ "dep:pin-utils" ];
+          "serde" = [ "dep:serde" ];
+          "serde_json" = [ "dep:serde_json" ];
+        };
+        resolvedDefaultFeatures = [ "default" ];
+      };
       "lru" = rec {
         crateName = "lru";
         version = "0.12.3";
@@ -7027,6 +6645,19 @@ rec {
         ];
 
       };
+      "number_prefix" = rec {
+        crateName = "number_prefix";
+        version = "0.4.0";
+        edition = "2015";
+        sha256 = "1wvh13wvlajqxkb1filsfzbrnq0vrmrw298v2j3sy82z1rm282w3";
+        authors = [
+          "Benjamin Sago <ogham@bsago.me>"
+        ];
+        features = {
+          "default" = [ "std" ];
+        };
+        resolvedDefaultFeatures = [ "default" "std" ];
+      };
       "object" = rec {
         crateName = "object";
         version = "0.32.2";
@@ -7096,7 +6727,7 @@ rec {
           }
           {
             name = "hyper";
-            packageId = "hyper 0.14.28";
+            packageId = "hyper";
             optional = true;
             usesDefaultFeatures = false;
           }
@@ -7190,7 +6821,7 @@ rec {
         devDependencies = [
           {
             name = "hyper";
-            packageId = "hyper 0.14.28";
+            packageId = "hyper";
             features = [ "server" ];
           }
           {
@@ -7259,9 +6890,9 @@ rec {
       };
       "opentelemetry" = rec {
         crateName = "opentelemetry";
-        version = "0.21.0";
+        version = "0.22.0";
         edition = "2021";
-        sha256 = "12jfmyx8k9q2sjlx4wp76ddzaf94i7lnkliv1c9mj164bnd36chy";
+        sha256 = "1gv70rx8172g9n82v9f97ircax7v4ydzyprq1nvsxwp3gfc5f3ch";
         dependencies = [
           {
             name = "futures-core";
@@ -7272,10 +6903,6 @@ rec {
             packageId = "futures-sink";
           }
           {
-            name = "indexmap";
-            packageId = "indexmap 2.1.0";
-          }
-          {
             name = "js-sys";
             packageId = "js-sys";
             target = { target, features }: (("wasm32" == target."arch" or null) && (!("wasi" == target."os" or null)));
@@ -7292,6 +6919,7 @@ rec {
           {
             name = "thiserror";
             packageId = "thiserror";
+            usesDefaultFeatures = false;
           }
           {
             name = "urlencoding";
@@ -7307,11 +6935,44 @@ rec {
         };
         resolvedDefaultFeatures = [ "default" "metrics" "pin-project-lite" "trace" ];
       };
+      "opentelemetry-http" = rec {
+        crateName = "opentelemetry-http";
+        version = "0.11.1";
+        edition = "2021";
+        sha256 = "151xfhlakkmi9v6sqarkmxz02sbl2l0nbajgij216rvppxvxr43n";
+        dependencies = [
+          {
+            name = "async-trait";
+            packageId = "async-trait";
+          }
+          {
+            name = "bytes";
+            packageId = "bytes";
+          }
+          {
+            name = "http";
+            packageId = "http";
+            usesDefaultFeatures = false;
+          }
+          {
+            name = "opentelemetry";
+            packageId = "opentelemetry";
+            features = [ "trace" ];
+          }
+        ];
+        features = {
+          "hyper" = [ "dep:hyper" ];
+          "isahc" = [ "dep:isahc" ];
+          "reqwest" = [ "dep:reqwest" ];
+          "reqwest-rustls" = [ "reqwest" "reqwest/rustls-tls-native-roots" ];
+          "tokio" = [ "dep:tokio" ];
+        };
+      };
       "opentelemetry-otlp" = rec {
         crateName = "opentelemetry-otlp";
-        version = "0.14.0";
+        version = "0.15.0";
         edition = "2021";
-        sha256 = "0c59bh4wa824mf89ayivsjqwipkg1y6r27r4d0y47lhfna1xlk7j";
+        sha256 = "1jxbi5w4xgwg4gcj0lz4310y926bglw25b2546pkkilmjj6nn08s";
         dependencies = [
           {
             name = "async-trait";
@@ -7323,8 +6984,9 @@ rec {
           }
           {
             name = "http";
-            packageId = "http 0.2.11";
+            packageId = "http";
             optional = true;
+            usesDefaultFeatures = false;
           }
           {
             name = "opentelemetry";
@@ -7347,45 +7009,45 @@ rec {
           }
           {
             name = "prost";
-            packageId = "prost 0.11.9";
+            packageId = "prost";
             optional = true;
           }
           {
             name = "thiserror";
             packageId = "thiserror";
+            usesDefaultFeatures = false;
           }
           {
             name = "tokio";
             packageId = "tokio";
             optional = true;
+            usesDefaultFeatures = false;
             features = [ "sync" "rt" ];
           }
           {
             name = "tonic";
-            packageId = "tonic 0.9.2";
+            packageId = "tonic";
             optional = true;
+            usesDefaultFeatures = false;
           }
         ];
         devDependencies = [
           {
             name = "tokio";
             packageId = "tokio";
+            usesDefaultFeatures = false;
             features = [ "macros" "rt-multi-thread" ];
           }
         ];
         features = {
           "default" = [ "grpc-tonic" "trace" ];
-          "grpc-sys" = [ "grpcio" "opentelemetry-proto/gen-grpcio" ];
           "grpc-tonic" = [ "tonic" "prost" "http" "tokio" "opentelemetry-proto/gen-tonic" ];
-          "grpcio" = [ "dep:grpcio" ];
           "gzip-tonic" = [ "tonic/gzip" ];
           "http" = [ "dep:http" ];
           "http-proto" = [ "prost" "opentelemetry-http" "opentelemetry-proto/gen-tonic-messages" "http" "trace" "metrics" ];
           "integration-testing" = [ "tonic" "prost" "tokio/full" "trace" ];
           "logs" = [ "opentelemetry/logs" "opentelemetry_sdk/logs" "opentelemetry-proto/logs" ];
           "metrics" = [ "opentelemetry/metrics" "opentelemetry_sdk/metrics" "opentelemetry-proto/metrics" ];
-          "openssl" = [ "grpcio/openssl" ];
-          "openssl-vendored" = [ "grpcio/openssl-vendored" ];
           "opentelemetry-http" = [ "dep:opentelemetry-http" ];
           "prost" = [ "dep:prost" ];
           "reqwest" = [ "dep:reqwest" ];
@@ -7394,8 +7056,6 @@ rec {
           "reqwest-rustls" = [ "reqwest" "reqwest/rustls-tls-native-roots" ];
           "serde" = [ "dep:serde" ];
           "serialize" = [ "serde" ];
-          "surf" = [ "dep:surf" ];
-          "surf-client" = [ "surf" "opentelemetry-http/surf" ];
           "tls" = [ "tonic/tls" ];
           "tls-roots" = [ "tls" "tonic/tls-roots" ];
           "tokio" = [ "dep:tokio" ];
@@ -7406,9 +7066,9 @@ rec {
       };
       "opentelemetry-proto" = rec {
         crateName = "opentelemetry-proto";
-        version = "0.4.0";
+        version = "0.5.0";
         edition = "2021";
-        sha256 = "1qblsq0hkksdw3k60bc8yi5xwlynmqwibggz3lyyl4n8bk75bqd2";
+        sha256 = "1r5a1k4fryqijhsar36ld806yf82isw11xfnx7d80nwgnv4xv3rs";
         dependencies = [
           {
             name = "opentelemetry";
@@ -7422,53 +7082,47 @@ rec {
           }
           {
             name = "prost";
-            packageId = "prost 0.11.9";
+            packageId = "prost";
             optional = true;
           }
           {
             name = "tonic";
-            packageId = "tonic 0.9.2";
+            packageId = "tonic";
             optional = true;
             usesDefaultFeatures = false;
             features = [ "codegen" "prost" ];
           }
         ];
         features = {
-          "full" = [ "gen-tonic" "gen-grpcio" "trace" "logs" "metrics" "zpages" "with-serde" ];
-          "gen-grpcio" = [ "grpcio" "prost" ];
+          "full" = [ "gen-tonic" "trace" "logs" "metrics" "zpages" "with-serde" ];
           "gen-tonic" = [ "gen-tonic-messages" "tonic/transport" ];
           "gen-tonic-messages" = [ "tonic" "prost" ];
-          "grpcio" = [ "dep:grpcio" ];
+          "hex" = [ "dep:hex" ];
           "logs" = [ "opentelemetry/logs" "opentelemetry_sdk/logs" ];
           "metrics" = [ "opentelemetry/metrics" "opentelemetry_sdk/metrics" ];
           "prost" = [ "dep:prost" ];
+          "schemars" = [ "dep:schemars" ];
           "serde" = [ "dep:serde" ];
           "tonic" = [ "dep:tonic" ];
           "trace" = [ "opentelemetry/trace" "opentelemetry_sdk/trace" ];
-          "with-serde" = [ "serde" ];
+          "with-schemars" = [ "schemars" ];
+          "with-serde" = [ "serde" "hex" ];
           "zpages" = [ "trace" ];
         };
         resolvedDefaultFeatures = [ "gen-tonic" "gen-tonic-messages" "prost" "tonic" "trace" ];
       };
       "opentelemetry-semantic-conventions" = rec {
         crateName = "opentelemetry-semantic-conventions";
-        version = "0.13.0";
+        version = "0.14.0";
         edition = "2021";
-        sha256 = "115wbgk840dklyhpg3lwp4x1m643qd7f0vkz8hmfz0pry4g4yxzm";
-        dependencies = [
-          {
-            name = "opentelemetry";
-            packageId = "opentelemetry";
-            usesDefaultFeatures = false;
-          }
-        ];
+        sha256 = "04197racbkpj75fh9jnwkdznjzv6l2ljpbr8ryfk9f9gqkb5pazr";
 
       };
       "opentelemetry_sdk" = rec {
         crateName = "opentelemetry_sdk";
-        version = "0.21.2";
+        version = "0.22.1";
         edition = "2021";
-        sha256 = "1r7gw2j2n800rd0vdnga32yhlfmc3c4y0sadcr97licam74aw5ig";
+        sha256 = "0zkbkl29qik7cfmwbhr2ncink8fp9vi5x2qgk8gf6jg67c8wg44y";
         dependencies = [
           {
             name = "async-trait";
@@ -7521,11 +7175,12 @@ rec {
             packageId = "rand";
             optional = true;
             usesDefaultFeatures = false;
-            features = [ "std" "std_rng" ];
+            features = [ "std" "std_rng" "small_rng" ];
           }
           {
             name = "thiserror";
             packageId = "thiserror";
+            usesDefaultFeatures = false;
           }
           {
             name = "tokio";
@@ -8131,6 +7786,18 @@ rec {
         ];
 
       };
+      "portable-atomic" = rec {
+        crateName = "portable-atomic";
+        version = "1.6.0";
+        edition = "2018";
+        sha256 = "1h77x9qx7pns0d66vdrmdbmwpi7586h7ysnkdnhrn5mwi2cyyw3i";
+        features = {
+          "critical-section" = [ "dep:critical-section" ];
+          "default" = [ "fallback" ];
+          "serde" = [ "dep:serde" ];
+        };
+        resolvedDefaultFeatures = [ "default" "fallback" ];
+      };
       "powerfmt" = rec {
         crateName = "powerfmt";
         version = "0.2.0";
@@ -8328,35 +7995,7 @@ rec {
         };
         resolvedDefaultFeatures = [ "alloc" "bit-set" "default" "fork" "lazy_static" "regex-syntax" "rusty-fork" "std" "tempfile" "timeout" ];
       };
-      "prost 0.11.9" = rec {
-        crateName = "prost";
-        version = "0.11.9";
-        edition = "2021";
-        sha256 = "1kc1hva2h894hc0zf6r4r8fsxfpazf7xn5rj3jya9sbrsyhym0hb";
-        authors = [
-          "Dan Burkert <dan@danburkert.com>"
-          "Lucio Franco <luciofranco14@gmail.com"
-          "Tokio Contributors <team@tokio.rs>"
-        ];
-        dependencies = [
-          {
-            name = "bytes";
-            packageId = "bytes";
-            usesDefaultFeatures = false;
-          }
-          {
-            name = "prost-derive";
-            packageId = "prost-derive 0.11.9";
-            optional = true;
-          }
-        ];
-        features = {
-          "default" = [ "prost-derive" "std" ];
-          "prost-derive" = [ "dep:prost-derive" ];
-        };
-        resolvedDefaultFeatures = [ "default" "prost-derive" "std" ];
-      };
-      "prost 0.12.3" = rec {
+      "prost" = rec {
         crateName = "prost";
         version = "0.12.3";
         edition = "2021";
@@ -8374,7 +8013,7 @@ rec {
           }
           {
             name = "prost-derive";
-            packageId = "prost-derive 0.12.3";
+            packageId = "prost-derive";
             optional = true;
           }
         ];
@@ -8435,7 +8074,7 @@ rec {
           }
           {
             name = "prost";
-            packageId = "prost 0.12.3";
+            packageId = "prost";
             usesDefaultFeatures = false;
           }
           {
@@ -8486,45 +8125,7 @@ rec {
         };
         resolvedDefaultFeatures = [ "cleanup-markdown" "default" "format" "prettyplease" "pulldown-cmark" "pulldown-cmark-to-cmark" "syn" ];
       };
-      "prost-derive 0.11.9" = rec {
-        crateName = "prost-derive";
-        version = "0.11.9";
-        edition = "2021";
-        sha256 = "1d3mw2s2jba1f7wcjmjd6ha2a255p2rmynxhm1nysv9w1z8xilp5";
-        procMacro = true;
-        authors = [
-          "Dan Burkert <dan@danburkert.com>"
-          "Lucio Franco <luciofranco14@gmail.com>"
-          "Tokio Contributors <team@tokio.rs>"
-        ];
-        dependencies = [
-          {
-            name = "anyhow";
-            packageId = "anyhow";
-          }
-          {
-            name = "itertools";
-            packageId = "itertools 0.10.5";
-            usesDefaultFeatures = false;
-            features = [ "use_alloc" ];
-          }
-          {
-            name = "proc-macro2";
-            packageId = "proc-macro2";
-          }
-          {
-            name = "quote";
-            packageId = "quote";
-          }
-          {
-            name = "syn";
-            packageId = "syn 1.0.109";
-            features = [ "extra-traits" ];
-          }
-        ];
-
-      };
-      "prost-derive 0.12.3" = rec {
+      "prost-derive" = rec {
         crateName = "prost-derive";
         version = "0.12.3";
         edition = "2021";
@@ -8575,7 +8176,7 @@ rec {
         dependencies = [
           {
             name = "prost";
-            packageId = "prost 0.12.3";
+            packageId = "prost";
             usesDefaultFeatures = false;
             features = [ "prost-derive" ];
           }
@@ -8607,7 +8208,7 @@ rec {
           }
           {
             name = "prost";
-            packageId = "prost 0.12.3";
+            packageId = "prost";
           }
           {
             name = "serde";
@@ -8643,7 +8244,7 @@ rec {
           }
           {
             name = "prost";
-            packageId = "prost 0.12.3";
+            packageId = "prost";
           }
           {
             name = "prost-build";
@@ -8677,7 +8278,7 @@ rec {
           }
           {
             name = "prost";
-            packageId = "prost 0.12.3";
+            packageId = "prost";
           }
           {
             name = "prost-wkt";
@@ -8699,7 +8300,7 @@ rec {
         buildDependencies = [
           {
             name = "prost";
-            packageId = "prost 0.12.3";
+            packageId = "prost";
           }
           {
             name = "prost-build";
@@ -9352,21 +8953,21 @@ rec {
           }
           {
             name = "h2";
-            packageId = "h2 0.3.26";
+            packageId = "h2";
             target = { target, features }: (!("wasm32" == target."arch" or null));
           }
           {
             name = "http";
-            packageId = "http 0.2.11";
+            packageId = "http";
           }
           {
             name = "http-body";
-            packageId = "http-body 0.4.6";
+            packageId = "http-body";
             target = { target, features }: (!("wasm32" == target."arch" or null));
           }
           {
             name = "hyper";
-            packageId = "hyper 0.14.28";
+            packageId = "hyper";
             usesDefaultFeatures = false;
             target = { target, features }: (!("wasm32" == target."arch" or null));
             features = [ "tcp" "http1" "http2" "client" "runtime" ];
@@ -9515,7 +9116,7 @@ rec {
         devDependencies = [
           {
             name = "hyper";
-            packageId = "hyper 0.14.28";
+            packageId = "hyper";
             usesDefaultFeatures = false;
             target = { target, features }: (!("wasm32" == target."arch" or null));
             features = [ "tcp" "stream" "http1" "http2" "client" "server" "runtime" ];
@@ -10434,6 +10035,16 @@ rec {
         ];
 
       };
+      "scoped-tls" = rec {
+        crateName = "scoped-tls";
+        version = "1.0.1";
+        edition = "2015";
+        sha256 = "15524h04mafihcvfpgxd8f4bgc3k95aclz8grjkg9a0rxcvn9kz1";
+        authors = [
+          "Alex Crichton <alex@alexcrichton.com>"
+        ];
+
+      };
       "scopeguard" = rec {
         crateName = "scopeguard";
         version = "1.2.0";
@@ -10662,27 +10273,7 @@ rec {
           "preserve_order" = [ "indexmap" "std" ];
           "std" = [ "serde/std" ];
         };
-        resolvedDefaultFeatures = [ "alloc" "default" "raw_value" "std" ];
-      };
-      "serde_path_to_error" = rec {
-        crateName = "serde_path_to_error";
-        version = "0.1.16";
-        edition = "2021";
-        sha256 = "19hlz2359l37ifirskpcds7sxg0gzpqvfilibs7whdys0128i6dg";
-        authors = [
-          "David Tolnay <dtolnay@gmail.com>"
-        ];
-        dependencies = [
-          {
-            name = "itoa";
-            packageId = "itoa";
-          }
-          {
-            name = "serde";
-            packageId = "serde";
-          }
-        ];
-
+        resolvedDefaultFeatures = [ "alloc" "default" "std" ];
       };
       "serde_qs" = rec {
         crateName = "serde_qs";
@@ -11145,7 +10736,6 @@ rec {
           "drain_keep_rest" = [ "drain_filter" ];
           "serde" = [ "dep:serde" ];
         };
-        resolvedDefaultFeatures = [ "const_generics" "const_new" ];
       };
       "smol_str" = rec {
         crateName = "smol_str";
@@ -11743,6 +11333,90 @@ rec {
         ];
         features = { };
       };
+      "threadpool" = rec {
+        crateName = "threadpool";
+        version = "1.8.1";
+        edition = "2015";
+        sha256 = "1amgfyzvynbm8pacniivzq9r0fh3chhs7kijic81j76l6c5ycl6h";
+        authors = [
+          "The Rust Project Developers"
+          "Corey Farwell <coreyf@rwell.org>"
+          "Stefan Schindler <dns2utf8@estada.ch>"
+        ];
+        dependencies = [
+          {
+            name = "num_cpus";
+            packageId = "num_cpus";
+          }
+        ];
+
+      };
+      "tikv-jemalloc-sys" = rec {
+        crateName = "tikv-jemalloc-sys";
+        version = "0.5.4+5.3.0-patched";
+        edition = "2018";
+        links = "jemalloc";
+        sha256 = "1lc5vm1p9dqdvd3mn3264zddnd7z6i95ch3y69prnjgxp0y480ll";
+        authors = [
+          "Alex Crichton <alex@alexcrichton.com>"
+          "Gonzalo Brito Gadeschi <gonzalobg88@gmail.com>"
+          "The TiKV Project Developers"
+        ];
+        dependencies = [
+          {
+            name = "libc";
+            packageId = "libc";
+            usesDefaultFeatures = false;
+          }
+        ];
+        buildDependencies = [
+          {
+            name = "cc";
+            packageId = "cc";
+          }
+        ];
+        features = {
+          "background_threads" = [ "background_threads_runtime_support" ];
+          "default" = [ "background_threads_runtime_support" ];
+        };
+        resolvedDefaultFeatures = [ "background_threads_runtime_support" ];
+      };
+      "tikv-jemallocator" = rec {
+        crateName = "tikv-jemallocator";
+        version = "0.5.4";
+        edition = "2018";
+        sha256 = "1jpanfm9az8hcbg6dyxdabykx03lj0j4g9cbwfa6rig5dg1f0pwn";
+        authors = [
+          "Alex Crichton <alex@alexcrichton.com>"
+          "Gonzalo Brito Gadeschi <gonzalobg88@gmail.com>"
+          "Simon Sapin <simon.sapin@exyr.org>"
+          "Steven Fackler <sfackler@gmail.com>"
+          "The TiKV Project Developers"
+        ];
+        dependencies = [
+          {
+            name = "libc";
+            packageId = "libc";
+            usesDefaultFeatures = false;
+          }
+          {
+            name = "tikv-jemalloc-sys";
+            packageId = "tikv-jemalloc-sys";
+            usesDefaultFeatures = false;
+          }
+        ];
+        features = {
+          "background_threads" = [ "tikv-jemalloc-sys/background_threads" ];
+          "background_threads_runtime_support" = [ "tikv-jemalloc-sys/background_threads_runtime_support" ];
+          "debug" = [ "tikv-jemalloc-sys/debug" ];
+          "default" = [ "background_threads_runtime_support" ];
+          "disable_initial_exec_tls" = [ "tikv-jemalloc-sys/disable_initial_exec_tls" ];
+          "profiling" = [ "tikv-jemalloc-sys/profiling" ];
+          "stats" = [ "tikv-jemalloc-sys/stats" ];
+          "unprefixed_malloc_on_supported_platforms" = [ "tikv-jemalloc-sys/unprefixed_malloc_on_supported_platforms" ];
+        };
+        resolvedDefaultFeatures = [ "background_threads_runtime_support" "default" ];
+      };
       "time" = rec {
         crateName = "time";
         version = "0.3.34";
@@ -12049,16 +11723,11 @@ rec {
       };
       "tokio-listener" = rec {
         crateName = "tokio-listener";
-        version = "0.3.2";
+        version = "0.4.2";
         edition = "2021";
-        sha256 = "00vkr1cywd2agn8jbkzwwf7y4ps3cfjm8l9ab697px2cgc97wdln";
+        sha256 = "1cm6r5dmpq96s8gw9dgsinq5g8s466j48dg7dckwc4gc28g6cd21";
         dependencies = [
           {
-            name = "axum";
-            packageId = "axum 0.7.4";
-            rename = "axum07";
-          }
-          {
             name = "document-features";
             packageId = "document-features";
           }
@@ -12102,7 +11771,7 @@ rec {
           }
           {
             name = "tonic";
-            packageId = "tonic 0.11.0";
+            packageId = "tonic";
             rename = "tonic";
             optional = true;
           }
@@ -12119,12 +11788,14 @@ rec {
           }
         ];
         features = {
-          "axum07" = [ "dep:hyper1" "dep:hyper-util" "dep:futures-util" "dep:tower-service" "dep:tower" ];
+          "axum07" = [ "dep:hyper1" "dep:hyper-util" "dep:futures-util" "dep:tower-service" "dep:tower" "dep:axum07" ];
           "clap" = [ "dep:clap" ];
           "default" = [ "user_facing_default" "tokio-util" ];
           "hyper014" = [ "dep:hyper014" ];
           "inetd" = [ "dep:futures-util" ];
+          "multi-listener" = [ "dep:futures-util" ];
           "nix" = [ "dep:nix" ];
+          "sd_listen" = [ "socket2" ];
           "serde" = [ "dep:serde" "serde_with" ];
           "serde_with" = [ "dep:serde_with" ];
           "socket2" = [ "dep:socket2" ];
@@ -12579,7 +12250,7 @@ rec {
         };
         resolvedDefaultFeatures = [ "default" "serde" ];
       };
-      "tonic 0.11.0" = rec {
+      "tonic" = rec {
         crateName = "tonic";
         version = "0.11.0";
         edition = "2021";
@@ -12600,7 +12271,7 @@ rec {
           }
           {
             name = "axum";
-            packageId = "axum 0.6.20";
+            packageId = "axum";
             optional = true;
             usesDefaultFeatures = false;
           }
@@ -12614,20 +12285,20 @@ rec {
           }
           {
             name = "h2";
-            packageId = "h2 0.3.26";
+            packageId = "h2";
             optional = true;
           }
           {
             name = "http";
-            packageId = "http 0.2.11";
+            packageId = "http";
           }
           {
             name = "http-body";
-            packageId = "http-body 0.4.6";
+            packageId = "http-body";
           }
           {
             name = "hyper";
-            packageId = "hyper 0.14.28";
+            packageId = "hyper";
             optional = true;
             features = [ "full" ];
           }
@@ -12646,7 +12317,7 @@ rec {
           }
           {
             name = "prost";
-            packageId = "prost 0.12.3";
+            packageId = "prost";
             optional = true;
             usesDefaultFeatures = false;
             features = [ "std" ];
@@ -12725,139 +12396,6 @@ rec {
         };
         resolvedDefaultFeatures = [ "channel" "codegen" "default" "prost" "tls" "tls-roots" "tls-roots-common" "transport" ];
       };
-      "tonic 0.9.2" = rec {
-        crateName = "tonic";
-        version = "0.9.2";
-        edition = "2021";
-        sha256 = "0nlx35lvah5hdcp6lg1d6dlprq0zz8ijj6f727szfcv479m6d0ih";
-        authors = [
-          "Lucio Franco <luciofranco14@gmail.com>"
-        ];
-        dependencies = [
-          {
-            name = "async-trait";
-            packageId = "async-trait";
-            optional = true;
-          }
-          {
-            name = "axum";
-            packageId = "axum 0.6.20";
-            optional = true;
-            usesDefaultFeatures = false;
-          }
-          {
-            name = "base64";
-            packageId = "base64";
-          }
-          {
-            name = "bytes";
-            packageId = "bytes";
-          }
-          {
-            name = "futures-core";
-            packageId = "futures-core";
-            usesDefaultFeatures = false;
-          }
-          {
-            name = "futures-util";
-            packageId = "futures-util";
-            usesDefaultFeatures = false;
-          }
-          {
-            name = "h2";
-            packageId = "h2 0.3.26";
-            optional = true;
-          }
-          {
-            name = "http";
-            packageId = "http 0.2.11";
-          }
-          {
-            name = "http-body";
-            packageId = "http-body 0.4.6";
-          }
-          {
-            name = "hyper";
-            packageId = "hyper 0.14.28";
-            optional = true;
-            features = [ "full" ];
-          }
-          {
-            name = "hyper-timeout";
-            packageId = "hyper-timeout";
-            optional = true;
-          }
-          {
-            name = "percent-encoding";
-            packageId = "percent-encoding";
-          }
-          {
-            name = "pin-project";
-            packageId = "pin-project";
-          }
-          {
-            name = "prost";
-            packageId = "prost 0.11.9";
-            optional = true;
-            usesDefaultFeatures = false;
-            features = [ "std" ];
-          }
-          {
-            name = "tokio";
-            packageId = "tokio";
-            optional = true;
-            features = [ "net" "time" "macros" ];
-          }
-          {
-            name = "tokio-stream";
-            packageId = "tokio-stream";
-          }
-          {
-            name = "tower";
-            packageId = "tower";
-            optional = true;
-            usesDefaultFeatures = false;
-            features = [ "balance" "buffer" "discover" "limit" "load" "make" "timeout" "util" ];
-          }
-          {
-            name = "tower-layer";
-            packageId = "tower-layer";
-          }
-          {
-            name = "tower-service";
-            packageId = "tower-service";
-          }
-          {
-            name = "tracing";
-            packageId = "tracing";
-          }
-        ];
-        devDependencies = [
-          {
-            name = "tokio";
-            packageId = "tokio";
-            features = [ "rt" "macros" ];
-          }
-          {
-            name = "tower";
-            packageId = "tower";
-            features = [ "full" ];
-          }
-        ];
-        features = {
-          "channel" = [ "dep:h2" "dep:hyper" "dep:tokio" "dep:tower" "dep:hyper-timeout" ];
-          "codegen" = [ "dep:async-trait" ];
-          "default" = [ "transport" "codegen" "prost" ];
-          "gzip" = [ "dep:flate2" ];
-          "prost" = [ "dep:prost" ];
-          "tls" = [ "dep:rustls-pemfile" "transport" "dep:tokio-rustls" "dep:async-stream" ];
-          "tls-roots" = [ "tls-roots-common" "dep:rustls-native-certs" ];
-          "tls-roots-common" = [ "tls" ];
-          "tls-webpki-roots" = [ "tls-roots-common" "dep:webpki-roots" ];
-          "transport" = [ "dep:axum" "channel" ];
-        };
-        resolvedDefaultFeatures = [ "channel" "codegen" "default" "prost" "transport" ];
-      };
       "tonic-build" = rec {
         crateName = "tonic-build";
         version = "0.11.0";
@@ -12909,7 +12447,7 @@ rec {
         dependencies = [
           {
             name = "prost";
-            packageId = "prost 0.12.3";
+            packageId = "prost";
           }
           {
             name = "prost-types";
@@ -12930,7 +12468,7 @@ rec {
           }
           {
             name = "tonic";
-            packageId = "tonic 0.11.0";
+            packageId = "tonic";
             usesDefaultFeatures = false;
             features = [ "codegen" "prost" ];
           }
@@ -12938,7 +12476,7 @@ rec {
         devDependencies = [
           {
             name = "tonic";
-            packageId = "tonic 0.11.0";
+            packageId = "tonic";
             usesDefaultFeatures = false;
             features = [ "transport" ];
           }
@@ -13071,6 +12609,106 @@ rec {
         };
         resolvedDefaultFeatures = [ "__common" "balance" "buffer" "default" "discover" "futures-core" "futures-util" "indexmap" "limit" "load" "log" "make" "pin-project" "pin-project-lite" "rand" "ready-cache" "slab" "timeout" "tokio" "tokio-util" "tracing" "util" ];
       };
+      "tower-http" = rec {
+        crateName = "tower-http";
+        version = "0.4.4";
+        edition = "2018";
+        sha256 = "0h0i2flrw25zwxv72sifq4v5mwcb030spksy7r2a4xl2d4fvpib1";
+        authors = [
+          "Tower Maintainers <team@tower-rs.com>"
+        ];
+        dependencies = [
+          {
+            name = "bitflags";
+            packageId = "bitflags 2.4.2";
+          }
+          {
+            name = "bytes";
+            packageId = "bytes";
+          }
+          {
+            name = "futures-core";
+            packageId = "futures-core";
+          }
+          {
+            name = "futures-util";
+            packageId = "futures-util";
+            usesDefaultFeatures = false;
+          }
+          {
+            name = "http";
+            packageId = "http";
+          }
+          {
+            name = "http-body";
+            packageId = "http-body";
+          }
+          {
+            name = "http-range-header";
+            packageId = "http-range-header";
+          }
+          {
+            name = "pin-project-lite";
+            packageId = "pin-project-lite";
+          }
+          {
+            name = "tower-layer";
+            packageId = "tower-layer";
+          }
+          {
+            name = "tower-service";
+            packageId = "tower-service";
+          }
+          {
+            name = "tracing";
+            packageId = "tracing";
+            optional = true;
+            usesDefaultFeatures = false;
+          }
+        ];
+        devDependencies = [
+          {
+            name = "bytes";
+            packageId = "bytes";
+          }
+        ];
+        features = {
+          "async-compression" = [ "dep:async-compression" ];
+          "auth" = [ "base64" "validate-request" ];
+          "base64" = [ "dep:base64" ];
+          "catch-panic" = [ "tracing" "futures-util/std" ];
+          "compression-br" = [ "async-compression/brotli" "tokio-util" "tokio" ];
+          "compression-deflate" = [ "async-compression/zlib" "tokio-util" "tokio" ];
+          "compression-full" = [ "compression-br" "compression-deflate" "compression-gzip" "compression-zstd" ];
+          "compression-gzip" = [ "async-compression/gzip" "tokio-util" "tokio" ];
+          "compression-zstd" = [ "async-compression/zstd" "tokio-util" "tokio" ];
+          "decompression-br" = [ "async-compression/brotli" "tokio-util" "tokio" ];
+          "decompression-deflate" = [ "async-compression/zlib" "tokio-util" "tokio" ];
+          "decompression-full" = [ "decompression-br" "decompression-deflate" "decompression-gzip" "decompression-zstd" ];
+          "decompression-gzip" = [ "async-compression/gzip" "tokio-util" "tokio" ];
+          "decompression-zstd" = [ "async-compression/zstd" "tokio-util" "tokio" ];
+          "follow-redirect" = [ "iri-string" "tower/util" ];
+          "fs" = [ "tokio/fs" "tokio-util/io" "tokio/io-util" "mime_guess" "mime" "percent-encoding" "httpdate" "set-status" "futures-util/alloc" "tracing" ];
+          "full" = [ "add-extension" "auth" "catch-panic" "compression-full" "cors" "decompression-full" "follow-redirect" "fs" "limit" "map-request-body" "map-response-body" "metrics" "normalize-path" "propagate-header" "redirect" "request-id" "sensitive-headers" "set-header" "set-status" "timeout" "trace" "util" "validate-request" ];
+          "httpdate" = [ "dep:httpdate" ];
+          "iri-string" = [ "dep:iri-string" ];
+          "metrics" = [ "tokio/time" ];
+          "mime" = [ "dep:mime" ];
+          "mime_guess" = [ "dep:mime_guess" ];
+          "percent-encoding" = [ "dep:percent-encoding" ];
+          "request-id" = [ "uuid" ];
+          "timeout" = [ "tokio/time" ];
+          "tokio" = [ "dep:tokio" ];
+          "tokio-util" = [ "dep:tokio-util" ];
+          "tower" = [ "dep:tower" ];
+          "trace" = [ "tracing" ];
+          "tracing" = [ "dep:tracing" ];
+          "util" = [ "tower" ];
+          "uuid" = [ "dep:uuid" ];
+          "validate-request" = [ "mime" ];
+        };
+        resolvedDefaultFeatures = [ "default" "trace" "tracing" ];
+      };
       "tower-layer" = rec {
         crateName = "tower-layer";
         version = "0.3.2";
@@ -13136,7 +12774,7 @@ rec {
           "tracing-attributes" = [ "dep:tracing-attributes" ];
           "valuable" = [ "tracing-core/valuable" ];
         };
-        resolvedDefaultFeatures = [ "attributes" "default" "log" "max_level_trace" "release_max_level_info" "std" "tracing-attributes" ];
+        resolvedDefaultFeatures = [ "attributes" "default" "log" "max_level_trace" "release_max_level_debug" "std" "tracing-attributes" ];
       };
       "tracing-attributes" = rec {
         crateName = "tracing-attributes";
@@ -13233,6 +12871,33 @@ rec {
         };
         resolvedDefaultFeatures = [ "default" "pin-project" "std" "std-future" ];
       };
+      "tracing-indicatif" = rec {
+        crateName = "tracing-indicatif";
+        version = "0.3.6";
+        edition = "2021";
+        sha256 = "07cmn4ilw8hdfzc1mirccwkgl160k3x9fhgg7xydj4gy9r181586";
+        libName = "tracing_indicatif";
+        dependencies = [
+          {
+            name = "indicatif";
+            packageId = "indicatif";
+            features = [ "in_memory" ];
+          }
+          {
+            name = "tracing";
+            packageId = "tracing";
+          }
+          {
+            name = "tracing-core";
+            packageId = "tracing-core";
+          }
+          {
+            name = "tracing-subscriber";
+            packageId = "tracing-subscriber";
+          }
+        ];
+
+      };
       "tracing-log" = rec {
         crateName = "tracing-log";
         version = "0.2.0";
@@ -13266,9 +12931,9 @@ rec {
       };
       "tracing-opentelemetry" = rec {
         crateName = "tracing-opentelemetry";
-        version = "0.22.0";
+        version = "0.23.0";
         edition = "2018";
-        sha256 = "15jmwmbp6wz15bx20bfsmabx53wmlnd7wvzwz9hvkrq7aifc4yn6";
+        sha256 = "1112kmckw0qwyckhbwarb230n4ldmfgzixr9jagbfjmy3fx19gm9";
         authors = [
           "Julian Tescher <julian@tescher.me>"
           "Tokio Contributors <team@tokio.rs>"
@@ -13356,6 +13021,7 @@ rec {
         features = {
           "async-trait" = [ "dep:async-trait" ];
           "default" = [ "tracing-log" "metrics" ];
+          "futures-util" = [ "dep:futures-util" ];
           "metrics" = [ "opentelemetry/metrics" "opentelemetry_sdk/metrics" "smallvec" ];
           "smallvec" = [ "dep:smallvec" ];
           "thiserror" = [ "dep:thiserror" ];
@@ -13363,30 +13029,6 @@ rec {
         };
         resolvedDefaultFeatures = [ "default" "metrics" "smallvec" "tracing-log" ];
       };
-      "tracing-serde" = rec {
-        crateName = "tracing-serde";
-        version = "0.1.3";
-        edition = "2018";
-        sha256 = "1qfr0va69djvxqvjrx4vqq7p6myy414lx4w1f6amcn0hfwqj2sxw";
-        authors = [
-          "Tokio Contributors <team@tokio.rs>"
-        ];
-        dependencies = [
-          {
-            name = "serde";
-            packageId = "serde";
-          }
-          {
-            name = "tracing-core";
-            packageId = "tracing-core";
-          }
-        ];
-        features = {
-          "valuable" = [ "valuable_crate" "valuable-serde" "tracing-core/valuable" ];
-          "valuable-serde" = [ "dep:valuable-serde" ];
-          "valuable_crate" = [ "dep:valuable_crate" ];
-        };
-      };
       "tracing-subscriber" = rec {
         crateName = "tracing-subscriber";
         version = "0.3.18";
@@ -13421,16 +13063,6 @@ rec {
             features = [ "std" "unicode-case" "unicode-perl" ];
           }
           {
-            name = "serde";
-            packageId = "serde";
-            optional = true;
-          }
-          {
-            name = "serde_json";
-            packageId = "serde_json";
-            optional = true;
-          }
-          {
             name = "sharded-slab";
             packageId = "sharded-slab";
             optional = true;
@@ -13463,11 +13095,6 @@ rec {
             usesDefaultFeatures = false;
             features = [ "log-tracer" "std" ];
           }
-          {
-            name = "tracing-serde";
-            packageId = "tracing-serde";
-            optional = true;
-          }
         ];
         devDependencies = [
           {
@@ -13513,7 +13140,121 @@ rec {
           "valuable-serde" = [ "dep:valuable-serde" ];
           "valuable_crate" = [ "dep:valuable_crate" ];
         };
-        resolvedDefaultFeatures = [ "alloc" "ansi" "default" "env-filter" "fmt" "json" "matchers" "nu-ansi-term" "once_cell" "regex" "registry" "serde" "serde_json" "sharded-slab" "smallvec" "std" "thread_local" "tracing" "tracing-log" "tracing-serde" ];
+        resolvedDefaultFeatures = [ "alloc" "ansi" "default" "env-filter" "fmt" "matchers" "nu-ansi-term" "once_cell" "regex" "registry" "sharded-slab" "smallvec" "std" "thread_local" "tracing" "tracing-log" ];
+      };
+      "tracing-tracy" = rec {
+        crateName = "tracing-tracy";
+        version = "0.11.0";
+        edition = "2021";
+        sha256 = "051nf1ncp1z45acgk0h5199ldlhb0wmkmvhyvk8x17x6hi7x0930";
+        authors = [
+          "Simonas Kazlauskas <tracing-tracy@kazlauskas.me>"
+        ];
+        dependencies = [
+          {
+            name = "tracing-core";
+            packageId = "tracing-core";
+            usesDefaultFeatures = false;
+            features = [ "std" ];
+          }
+          {
+            name = "tracing-subscriber";
+            packageId = "tracing-subscriber";
+            usesDefaultFeatures = false;
+            features = [ "fmt" "registry" ];
+          }
+          {
+            name = "tracy-client";
+            packageId = "tracy-client";
+            rename = "client";
+            usesDefaultFeatures = false;
+          }
+        ];
+        features = {
+          "broadcast" = [ "client/broadcast" ];
+          "callstack-inlines" = [ "client/callstack-inlines" ];
+          "code-transfer" = [ "client/code-transfer" ];
+          "context-switch-tracing" = [ "client/context-switch-tracing" ];
+          "default" = [ "enable" "system-tracing" "context-switch-tracing" "sampling" "code-transfer" "broadcast" "callstack-inlines" ];
+          "delayed-init" = [ "client/delayed-init" ];
+          "enable" = [ "client/enable" ];
+          "fibers" = [ "client/fibers" ];
+          "flush-on-exit" = [ "client/flush-on-exit" ];
+          "manual-lifetime" = [ "client/manual-lifetime" ];
+          "ondemand" = [ "client/ondemand" ];
+          "only-ipv4" = [ "client/only-ipv4" ];
+          "only-localhost" = [ "client/only-localhost" ];
+          "sampling" = [ "client/sampling" ];
+          "system-tracing" = [ "client/system-tracing" ];
+          "timer-fallback" = [ "client/timer-fallback" ];
+        };
+        resolvedDefaultFeatures = [ "broadcast" "callstack-inlines" "code-transfer" "context-switch-tracing" "default" "enable" "flush-on-exit" "sampling" "system-tracing" ];
+      };
+      "tracy-client" = rec {
+        crateName = "tracy-client";
+        version = "0.17.0";
+        edition = "2021";
+        sha256 = "07bla4iigg17fl4zil2dwizslhw8mv8vrsfkhr7ri27zchd97ysr";
+        authors = [
+          "Simonas Kazlauskas <tracy-client@kazlauskas.me>"
+        ];
+        dependencies = [
+          {
+            name = "loom";
+            packageId = "loom";
+            target = { target, features }: (target."loom" or false);
+          }
+          {
+            name = "once_cell";
+            packageId = "once_cell";
+          }
+          {
+            name = "tracy-client-sys";
+            packageId = "tracy-client-sys";
+            rename = "sys";
+            usesDefaultFeatures = false;
+          }
+        ];
+        features = {
+          "broadcast" = [ "sys/broadcast" ];
+          "callstack-inlines" = [ "sys/callstack-inlines" ];
+          "code-transfer" = [ "sys/code-transfer" ];
+          "context-switch-tracing" = [ "sys/context-switch-tracing" ];
+          "default" = [ "enable" "system-tracing" "context-switch-tracing" "sampling" "code-transfer" "broadcast" "callstack-inlines" ];
+          "delayed-init" = [ "sys/delayed-init" ];
+          "enable" = [ "sys/enable" ];
+          "fibers" = [ "sys/fibers" ];
+          "flush-on-exit" = [ "sys/flush-on-exit" ];
+          "manual-lifetime" = [ "sys/manual-lifetime" ];
+          "ondemand" = [ "sys/ondemand" ];
+          "only-ipv4" = [ "sys/only-ipv4" ];
+          "only-localhost" = [ "sys/only-localhost" ];
+          "sampling" = [ "sys/sampling" ];
+          "system-tracing" = [ "sys/system-tracing" ];
+          "timer-fallback" = [ "sys/timer-fallback" ];
+        };
+        resolvedDefaultFeatures = [ "broadcast" "callstack-inlines" "code-transfer" "context-switch-tracing" "enable" "flush-on-exit" "sampling" "system-tracing" ];
+      };
+      "tracy-client-sys" = rec {
+        crateName = "tracy-client-sys";
+        version = "0.22.2";
+        edition = "2021";
+        sha256 = "10h8msq85b7rhfg2vg22g2iizbk4c6fcq0jiadad37gs1mhls44x";
+        authors = [
+          "Simonas Kazlauskas <tracy-client-sys@kazlauskas.me>"
+        ];
+        buildDependencies = [
+          {
+            name = "cc";
+            packageId = "cc";
+            usesDefaultFeatures = false;
+          }
+        ];
+        features = {
+          "default" = [ "enable" "system-tracing" "context-switch-tracing" "sampling" "code-transfer" "broadcast" "callstack-inlines" ];
+          "manual-lifetime" = [ "delayed-init" ];
+        };
+        resolvedDefaultFeatures = [ "broadcast" "callstack-inlines" "code-transfer" "context-switch-tracing" "enable" "flush-on-exit" "sampling" "system-tracing" ];
       };
       "try-lock" = rec {
         crateName = "try-lock";
@@ -13558,7 +13299,7 @@ rec {
           }
           {
             name = "prost";
-            packageId = "prost 0.12.3";
+            packageId = "prost";
           }
           {
             name = "thiserror";
@@ -13575,7 +13316,7 @@ rec {
           }
           {
             name = "tonic";
-            packageId = "tonic 0.11.0";
+            packageId = "tonic";
             features = [ "tls" "tls-roots" ];
           }
           {
@@ -13588,15 +13329,14 @@ rec {
             packageId = "tracing";
           }
           {
-            name = "tracing-subscriber";
-            packageId = "tracing-subscriber";
-            features = [ "json" ];
-          }
-          {
             name = "tvix-castore";
             packageId = "tvix-castore";
           }
           {
+            name = "tvix-tracing";
+            packageId = "tvix-tracing";
+          }
+          {
             name = "url";
             packageId = "url";
           }
@@ -13618,7 +13358,7 @@ rec {
           }
         ];
         features = {
-          "tonic-reflection" = [ "dep:tonic-reflection" ];
+          "tonic-reflection" = [ "dep:tonic-reflection" "tvix-castore/tonic-reflection" ];
         };
         resolvedDefaultFeatures = [ "default" "tonic-reflection" ];
       };
@@ -13634,6 +13374,11 @@ rec {
           else ./castore;
         dependencies = [
           {
+            name = "async-compression";
+            packageId = "async-compression";
+            features = [ "tokio" "zstd" ];
+          }
+          {
             name = "async-stream";
             packageId = "async-stream";
           }
@@ -13709,7 +13454,7 @@ rec {
           }
           {
             name = "prost";
-            packageId = "prost 0.12.3";
+            packageId = "prost";
           }
           {
             name = "serde";
@@ -13733,6 +13478,11 @@ rec {
             packageId = "thiserror";
           }
           {
+            name = "threadpool";
+            packageId = "threadpool";
+            optional = true;
+          }
+          {
             name = "tokio";
             packageId = "tokio";
             features = [ "fs" "macros" "net" "rt" "rt-multi-thread" "signal" ];
@@ -13749,11 +13499,11 @@ rec {
           {
             name = "tokio-util";
             packageId = "tokio-util";
-            features = [ "io" "io-util" ];
+            features = [ "io" "io-util" "codec" ];
           }
           {
             name = "tonic";
-            packageId = "tonic 0.11.0";
+            packageId = "tonic";
           }
           {
             name = "tonic-reflection";
@@ -13769,6 +13519,15 @@ rec {
             packageId = "tracing";
           }
           {
+            name = "tracing-indicatif";
+            packageId = "tracing-indicatif";
+          }
+          {
+            name = "tvix-tracing";
+            packageId = "tvix-tracing";
+            features = [ "tonic" ];
+          }
+          {
             name = "url";
             packageId = "url";
           }
@@ -13853,7 +13612,8 @@ rec {
         ];
         features = {
           "cloud" = [ "dep:bigtable_rs" "object_store/aws" "object_store/azure" "object_store/gcp" ];
-          "fs" = [ "dep:libc" "dep:fuse-backend-rs" ];
+          "default" = [ "cloud" ];
+          "fs" = [ "dep:fuse-backend-rs" "dep:threadpool" "dep:libc" ];
           "fuse" = [ "fs" ];
           "tonic-reflection" = [ "dep:tonic-reflection" ];
           "virtiofs" = [ "fs" "dep:vhost" "dep:vhost-user-backend" "dep:virtio-queue" "dep:vm-memory" "dep:vmm-sys-util" "dep:virtio-bindings" "fuse-backend-rs?/vhost-user-fs" "fuse-backend-rs?/virtiofs" ];
@@ -13896,6 +13656,10 @@ rec {
             packageId = "nix-compat";
           }
           {
+            name = "rnix";
+            packageId = "rnix";
+          }
+          {
             name = "rustyline";
             packageId = "rustyline";
           }
@@ -13904,18 +13668,21 @@ rec {
             packageId = "thiserror";
           }
           {
+            name = "tikv-jemallocator";
+            packageId = "tikv-jemallocator";
+            target = { target, features }: (!("msvc" == target."env" or null));
+          }
+          {
             name = "tokio";
             packageId = "tokio";
           }
           {
             name = "tracing";
             packageId = "tracing";
-            features = [ "max_level_trace" "release_max_level_info" ];
           }
           {
-            name = "tracing-subscriber";
-            packageId = "tracing-subscriber";
-            features = [ "json" ];
+            name = "tracing-indicatif";
+            packageId = "tracing-indicatif";
           }
           {
             name = "tvix-build";
@@ -13939,11 +13706,18 @@ rec {
             usesDefaultFeatures = false;
           }
           {
+            name = "tvix-tracing";
+            packageId = "tvix-tracing";
+          }
+          {
             name = "wu-manber";
             packageId = "wu-manber";
           }
         ];
-
+        features = {
+          "tracy" = [ "tvix-tracing/tracy" ];
+        };
+        resolvedDefaultFeatures = [ "default" "tracy" ];
       };
       "tvix-eval" = rec {
         crateName = "tvix-eval";
@@ -14076,10 +13850,6 @@ rec {
             packageId = "tvix-eval-builtin-macros";
             rename = "builtin-macros";
           }
-          {
-            name = "xml-rs";
-            packageId = "xml-rs";
-          }
         ];
         devDependencies = [
           {
@@ -14102,6 +13872,11 @@ rec {
             name = "tempfile";
             packageId = "tempfile";
           }
+          {
+            name = "tikv-jemallocator";
+            packageId = "tikv-jemallocator";
+            target = { target, features }: (!("msvc" == target."env" or null));
+          }
         ];
         features = {
           "arbitrary" = [ "proptest" "test-strategy" "imbl/proptest" ];
@@ -14223,6 +13998,11 @@ rec {
             packageId = "thiserror";
           }
           {
+            name = "tikv-jemallocator";
+            packageId = "tikv-jemallocator";
+            target = { target, features }: (!("msvc" == target."env" or null));
+          }
+          {
             name = "tokio";
             packageId = "tokio";
           }
@@ -14240,6 +14020,10 @@ rec {
             packageId = "tracing";
           }
           {
+            name = "tracing-indicatif";
+            packageId = "tracing-indicatif";
+          }
+          {
             name = "tvix-build";
             packageId = "tvix-build";
             usesDefaultFeatures = false;
@@ -14258,6 +14042,10 @@ rec {
             usesDefaultFeatures = false;
           }
           {
+            name = "tvix-tracing";
+            packageId = "tvix-tracing";
+          }
+          {
             name = "url";
             packageId = "url";
           }
@@ -14415,22 +14203,6 @@ rec {
             features = [ "async" ];
           }
           {
-            name = "opentelemetry";
-            packageId = "opentelemetry";
-            optional = true;
-          }
-          {
-            name = "opentelemetry-otlp";
-            packageId = "opentelemetry-otlp";
-            optional = true;
-          }
-          {
-            name = "opentelemetry_sdk";
-            packageId = "opentelemetry_sdk";
-            optional = true;
-            features = [ "rt-tokio" ];
-          }
-          {
             name = "parking_lot";
             packageId = "parking_lot 0.12.2";
           }
@@ -14440,7 +14212,7 @@ rec {
           }
           {
             name = "prost";
-            packageId = "prost 0.12.3";
+            packageId = "prost";
           }
           {
             name = "reqwest";
@@ -14499,7 +14271,7 @@ rec {
           }
           {
             name = "tonic";
-            packageId = "tonic 0.11.0";
+            packageId = "tonic";
             features = [ "tls" "tls-roots" ];
           }
           {
@@ -14512,23 +14284,28 @@ rec {
             packageId = "tower";
           }
           {
-            name = "tracing";
-            packageId = "tracing";
+            name = "tower-http";
+            packageId = "tower-http";
+            features = [ "trace" ];
           }
           {
-            name = "tracing-opentelemetry";
-            packageId = "tracing-opentelemetry";
+            name = "tracing";
+            packageId = "tracing";
           }
           {
-            name = "tracing-subscriber";
-            packageId = "tracing-subscriber";
-            features = [ "env-filter" "json" ];
+            name = "tracing-indicatif";
+            packageId = "tracing-indicatif";
           }
           {
             name = "tvix-castore";
             packageId = "tvix-castore";
           }
           {
+            name = "tvix-tracing";
+            packageId = "tvix-tracing";
+            features = [ "tonic" ];
+          }
+          {
             name = "url";
             packageId = "url";
           }
@@ -14573,11 +14350,104 @@ rec {
           "cloud" = [ "dep:bigtable_rs" "tvix-castore/cloud" ];
           "default" = [ "cloud" "fuse" "otlp" "tonic-reflection" ];
           "fuse" = [ "tvix-castore/fuse" ];
-          "otlp" = [ "dep:opentelemetry" "dep:opentelemetry-otlp" "dep:opentelemetry_sdk" ];
+          "otlp" = [ "tvix-tracing/otlp" ];
           "tonic-reflection" = [ "dep:tonic-reflection" "tvix-castore/tonic-reflection" ];
+          "tracy" = [ "tvix-tracing/tracy" ];
           "virtiofs" = [ "tvix-castore/virtiofs" ];
         };
-        resolvedDefaultFeatures = [ "cloud" "default" "fuse" "integration" "otlp" "tonic-reflection" "virtiofs" ];
+        resolvedDefaultFeatures = [ "cloud" "default" "fuse" "integration" "otlp" "tonic-reflection" "tracy" "virtiofs" ];
+      };
+      "tvix-tracing" = rec {
+        crateName = "tvix-tracing";
+        version = "0.1.0";
+        edition = "2021";
+        # We can't filter paths with references in Nix 2.4
+        # See https://github.com/NixOS/nix/issues/5410
+        src =
+          if ((lib.versionOlder builtins.nixVersion "2.4pre20211007") || (lib.versionOlder "2.5" builtins.nixVersion))
+          then lib.cleanSourceWith { filter = sourceFilter; src = ./tracing; }
+          else ./tracing;
+        dependencies = [
+          {
+            name = "http";
+            packageId = "http";
+            optional = true;
+          }
+          {
+            name = "indicatif";
+            packageId = "indicatif";
+          }
+          {
+            name = "lazy_static";
+            packageId = "lazy_static";
+          }
+          {
+            name = "opentelemetry";
+            packageId = "opentelemetry";
+            optional = true;
+          }
+          {
+            name = "opentelemetry-http";
+            packageId = "opentelemetry-http";
+            optional = true;
+          }
+          {
+            name = "opentelemetry-otlp";
+            packageId = "opentelemetry-otlp";
+            optional = true;
+          }
+          {
+            name = "opentelemetry_sdk";
+            packageId = "opentelemetry_sdk";
+            optional = true;
+            features = [ "rt-tokio" ];
+          }
+          {
+            name = "thiserror";
+            packageId = "thiserror";
+          }
+          {
+            name = "tokio";
+            packageId = "tokio";
+            features = [ "sync" "rt" ];
+          }
+          {
+            name = "tonic";
+            packageId = "tonic";
+            optional = true;
+          }
+          {
+            name = "tracing";
+            packageId = "tracing";
+            features = [ "max_level_trace" "release_max_level_debug" ];
+          }
+          {
+            name = "tracing-indicatif";
+            packageId = "tracing-indicatif";
+          }
+          {
+            name = "tracing-opentelemetry";
+            packageId = "tracing-opentelemetry";
+            optional = true;
+          }
+          {
+            name = "tracing-subscriber";
+            packageId = "tracing-subscriber";
+            features = [ "env-filter" ];
+          }
+          {
+            name = "tracing-tracy";
+            packageId = "tracing-tracy";
+            optional = true;
+            features = [ "flush-on-exit" ];
+          }
+        ];
+        features = {
+          "otlp" = [ "dep:tracing-opentelemetry" "dep:opentelemetry" "dep:opentelemetry-otlp" "dep:opentelemetry_sdk" "dep:opentelemetry-http" ];
+          "tonic" = [ "dep:tonic" "dep:http" ];
+          "tracy" = [ "dep:tracing-tracy" ];
+        };
+        resolvedDefaultFeatures = [ "default" "otlp" "tonic" "tracy" ];
       };
       "typenum" = rec {
         crateName = "typenum";
@@ -15100,6 +14970,97 @@ rec {
           "with-serde" = [ "serde" "serde_derive" ];
         };
       };
+      "vt100" = rec {
+        crateName = "vt100";
+        version = "0.15.2";
+        edition = "2021";
+        sha256 = "1pklc8y984axmxr0cd363srr2d27wd5rj15xlcmkjznvy0xqdkc4";
+        authors = [
+          "Jesse Luehrs <doy@tozt.net>"
+        ];
+        dependencies = [
+          {
+            name = "itoa";
+            packageId = "itoa";
+          }
+          {
+            name = "log";
+            packageId = "log";
+          }
+          {
+            name = "unicode-width";
+            packageId = "unicode-width";
+          }
+          {
+            name = "vte";
+            packageId = "vte";
+          }
+        ];
+        devDependencies = [
+          {
+            name = "vte";
+            packageId = "vte";
+          }
+        ];
+
+      };
+      "vte" = rec {
+        crateName = "vte";
+        version = "0.11.1";
+        edition = "2021";
+        sha256 = "15r1ff4j8ndqj9vsyil3wqwxhhl7jsz5g58f31n0h1wlpxgjn0pm";
+        authors = [
+          "Joe Wilm <joe@jwilm.com>"
+          "Christian Duerr <contact@christianduerr.com>"
+        ];
+        dependencies = [
+          {
+            name = "arrayvec";
+            packageId = "arrayvec";
+            optional = true;
+            usesDefaultFeatures = false;
+          }
+          {
+            name = "utf8parse";
+            packageId = "utf8parse";
+          }
+          {
+            name = "vte_generate_state_changes";
+            packageId = "vte_generate_state_changes";
+          }
+        ];
+        features = {
+          "ansi" = [ "log" ];
+          "arrayvec" = [ "dep:arrayvec" ];
+          "default" = [ "no_std" ];
+          "log" = [ "dep:log" ];
+          "nightly" = [ "utf8parse/nightly" ];
+          "no_std" = [ "arrayvec" ];
+          "serde" = [ "dep:serde" ];
+        };
+        resolvedDefaultFeatures = [ "arrayvec" "default" "no_std" ];
+      };
+      "vte_generate_state_changes" = rec {
+        crateName = "vte_generate_state_changes";
+        version = "0.1.1";
+        edition = "2018";
+        sha256 = "1zs5q766q7jmc80c5c80gpzy4qpg5lnydf94mgdzrpy7h5q82myj";
+        procMacro = true;
+        authors = [
+          "Christian Duerr <contact@christianduerr.com>"
+        ];
+        dependencies = [
+          {
+            name = "proc-macro2";
+            packageId = "proc-macro2";
+          }
+          {
+            name = "quote";
+            packageId = "quote";
+          }
+        ];
+
+      };
       "wait-timeout" = rec {
         crateName = "wait-timeout";
         version = "0.2.0";
@@ -15866,23 +15827,25 @@ rec {
       };
       "web-time" = rec {
         crateName = "web-time";
-        version = "0.2.4";
+        version = "1.1.0";
         edition = "2021";
-        sha256 = "1q6gk0nkwbfz30g1pz8g52mq00zjx7m5im36k3474aw73jdh8c5a";
+        sha256 = "1fx05yqx83dhx628wb70fyy10yjfq1jpl20qfqhdkymi13rq0ras";
         dependencies = [
           {
             name = "js-sys";
             packageId = "js-sys";
-            target = { target, features }: ((builtins.elem "wasm" target."family") && (!(("emscripten" == target."os" or null) || ("wasi" == target."os" or null))));
+            target = { target, features }: ((builtins.elem "wasm" target."family") && ("unknown" == target."os" or null));
           }
           {
             name = "wasm-bindgen";
             packageId = "wasm-bindgen";
             usesDefaultFeatures = false;
-            target = { target, features }: ((builtins.elem "wasm" target."family") && (!(("emscripten" == target."os" or null) || ("wasi" == target."os" or null))));
+            target = { target, features }: ((builtins.elem "wasm" target."family") && ("unknown" == target."os" or null));
           }
         ];
-
+        features = {
+          "serde" = [ "dep:serde" ];
+        };
       };
       "which 4.4.2" = rec {
         crateName = "which";
@@ -16021,7 +15984,717 @@ rec {
         ];
 
       };
-      "windows-core" = rec {
+      "windows" = rec {
+        crateName = "windows";
+        version = "0.54.0";
+        edition = "2021";
+        sha256 = "0j8vd8sg2rbln6g3a608qg1a7r2lwxcga78mmxjjin5ybmrfallj";
+        authors = [
+          "Microsoft"
+        ];
+        dependencies = [
+          {
+            name = "windows-core";
+            packageId = "windows-core 0.54.0";
+          }
+          {
+            name = "windows-targets";
+            packageId = "windows-targets 0.52.5";
+          }
+        ];
+        features = {
+          "AI" = [ "Foundation" ];
+          "AI_MachineLearning" = [ "AI" ];
+          "ApplicationModel" = [ "Foundation" ];
+          "ApplicationModel_Activation" = [ "ApplicationModel" ];
+          "ApplicationModel_AppExtensions" = [ "ApplicationModel" ];
+          "ApplicationModel_AppService" = [ "ApplicationModel" ];
+          "ApplicationModel_Appointments" = [ "ApplicationModel" ];
+          "ApplicationModel_Appointments_AppointmentsProvider" = [ "ApplicationModel_Appointments" ];
+          "ApplicationModel_Appointments_DataProvider" = [ "ApplicationModel_Appointments" ];
+          "ApplicationModel_Background" = [ "ApplicationModel" ];
+          "ApplicationModel_Calls" = [ "ApplicationModel" ];
+          "ApplicationModel_Calls_Background" = [ "ApplicationModel_Calls" ];
+          "ApplicationModel_Calls_Provider" = [ "ApplicationModel_Calls" ];
+          "ApplicationModel_Chat" = [ "ApplicationModel" ];
+          "ApplicationModel_CommunicationBlocking" = [ "ApplicationModel" ];
+          "ApplicationModel_Contacts" = [ "ApplicationModel" ];
+          "ApplicationModel_Contacts_DataProvider" = [ "ApplicationModel_Contacts" ];
+          "ApplicationModel_Contacts_Provider" = [ "ApplicationModel_Contacts" ];
+          "ApplicationModel_ConversationalAgent" = [ "ApplicationModel" ];
+          "ApplicationModel_Core" = [ "ApplicationModel" ];
+          "ApplicationModel_DataTransfer" = [ "ApplicationModel" ];
+          "ApplicationModel_DataTransfer_DragDrop" = [ "ApplicationModel_DataTransfer" ];
+          "ApplicationModel_DataTransfer_DragDrop_Core" = [ "ApplicationModel_DataTransfer_DragDrop" ];
+          "ApplicationModel_DataTransfer_ShareTarget" = [ "ApplicationModel_DataTransfer" ];
+          "ApplicationModel_Email" = [ "ApplicationModel" ];
+          "ApplicationModel_Email_DataProvider" = [ "ApplicationModel_Email" ];
+          "ApplicationModel_ExtendedExecution" = [ "ApplicationModel" ];
+          "ApplicationModel_ExtendedExecution_Foreground" = [ "ApplicationModel_ExtendedExecution" ];
+          "ApplicationModel_Holographic" = [ "ApplicationModel" ];
+          "ApplicationModel_LockScreen" = [ "ApplicationModel" ];
+          "ApplicationModel_Payments" = [ "ApplicationModel" ];
+          "ApplicationModel_Payments_Provider" = [ "ApplicationModel_Payments" ];
+          "ApplicationModel_Preview" = [ "ApplicationModel" ];
+          "ApplicationModel_Preview_Holographic" = [ "ApplicationModel_Preview" ];
+          "ApplicationModel_Preview_InkWorkspace" = [ "ApplicationModel_Preview" ];
+          "ApplicationModel_Preview_Notes" = [ "ApplicationModel_Preview" ];
+          "ApplicationModel_Resources" = [ "ApplicationModel" ];
+          "ApplicationModel_Resources_Core" = [ "ApplicationModel_Resources" ];
+          "ApplicationModel_Resources_Management" = [ "ApplicationModel_Resources" ];
+          "ApplicationModel_Search" = [ "ApplicationModel" ];
+          "ApplicationModel_Search_Core" = [ "ApplicationModel_Search" ];
+          "ApplicationModel_Store" = [ "ApplicationModel" ];
+          "ApplicationModel_Store_LicenseManagement" = [ "ApplicationModel_Store" ];
+          "ApplicationModel_Store_Preview" = [ "ApplicationModel_Store" ];
+          "ApplicationModel_Store_Preview_InstallControl" = [ "ApplicationModel_Store_Preview" ];
+          "ApplicationModel_UserActivities" = [ "ApplicationModel" ];
+          "ApplicationModel_UserActivities_Core" = [ "ApplicationModel_UserActivities" ];
+          "ApplicationModel_UserDataAccounts" = [ "ApplicationModel" ];
+          "ApplicationModel_UserDataAccounts_Provider" = [ "ApplicationModel_UserDataAccounts" ];
+          "ApplicationModel_UserDataAccounts_SystemAccess" = [ "ApplicationModel_UserDataAccounts" ];
+          "ApplicationModel_UserDataTasks" = [ "ApplicationModel" ];
+          "ApplicationModel_UserDataTasks_DataProvider" = [ "ApplicationModel_UserDataTasks" ];
+          "ApplicationModel_VoiceCommands" = [ "ApplicationModel" ];
+          "ApplicationModel_Wallet" = [ "ApplicationModel" ];
+          "ApplicationModel_Wallet_System" = [ "ApplicationModel_Wallet" ];
+          "Data" = [ "Foundation" ];
+          "Data_Html" = [ "Data" ];
+          "Data_Json" = [ "Data" ];
+          "Data_Pdf" = [ "Data" ];
+          "Data_Text" = [ "Data" ];
+          "Data_Xml" = [ "Data" ];
+          "Data_Xml_Dom" = [ "Data_Xml" ];
+          "Data_Xml_Xsl" = [ "Data_Xml" ];
+          "Devices" = [ "Foundation" ];
+          "Devices_Adc" = [ "Devices" ];
+          "Devices_Adc_Provider" = [ "Devices_Adc" ];
+          "Devices_Background" = [ "Devices" ];
+          "Devices_Bluetooth" = [ "Devices" ];
+          "Devices_Bluetooth_Advertisement" = [ "Devices_Bluetooth" ];
+          "Devices_Bluetooth_Background" = [ "Devices_Bluetooth" ];
+          "Devices_Bluetooth_GenericAttributeProfile" = [ "Devices_Bluetooth" ];
+          "Devices_Bluetooth_Rfcomm" = [ "Devices_Bluetooth" ];
+          "Devices_Custom" = [ "Devices" ];
+          "Devices_Display" = [ "Devices" ];
+          "Devices_Display_Core" = [ "Devices_Display" ];
+          "Devices_Enumeration" = [ "Devices" ];
+          "Devices_Enumeration_Pnp" = [ "Devices_Enumeration" ];
+          "Devices_Geolocation" = [ "Devices" ];
+          "Devices_Geolocation_Geofencing" = [ "Devices_Geolocation" ];
+          "Devices_Geolocation_Provider" = [ "Devices_Geolocation" ];
+          "Devices_Gpio" = [ "Devices" ];
+          "Devices_Gpio_Provider" = [ "Devices_Gpio" ];
+          "Devices_Haptics" = [ "Devices" ];
+          "Devices_HumanInterfaceDevice" = [ "Devices" ];
+          "Devices_I2c" = [ "Devices" ];
+          "Devices_I2c_Provider" = [ "Devices_I2c" ];
+          "Devices_Input" = [ "Devices" ];
+          "Devices_Input_Preview" = [ "Devices_Input" ];
+          "Devices_Lights" = [ "Devices" ];
+          "Devices_Lights_Effects" = [ "Devices_Lights" ];
+          "Devices_Midi" = [ "Devices" ];
+          "Devices_PointOfService" = [ "Devices" ];
+          "Devices_PointOfService_Provider" = [ "Devices_PointOfService" ];
+          "Devices_Portable" = [ "Devices" ];
+          "Devices_Power" = [ "Devices" ];
+          "Devices_Printers" = [ "Devices" ];
+          "Devices_Printers_Extensions" = [ "Devices_Printers" ];
+          "Devices_Pwm" = [ "Devices" ];
+          "Devices_Pwm_Provider" = [ "Devices_Pwm" ];
+          "Devices_Radios" = [ "Devices" ];
+          "Devices_Scanners" = [ "Devices" ];
+          "Devices_Sensors" = [ "Devices" ];
+          "Devices_Sensors_Custom" = [ "Devices_Sensors" ];
+          "Devices_SerialCommunication" = [ "Devices" ];
+          "Devices_SmartCards" = [ "Devices" ];
+          "Devices_Sms" = [ "Devices" ];
+          "Devices_Spi" = [ "Devices" ];
+          "Devices_Spi_Provider" = [ "Devices_Spi" ];
+          "Devices_Usb" = [ "Devices" ];
+          "Devices_WiFi" = [ "Devices" ];
+          "Devices_WiFiDirect" = [ "Devices" ];
+          "Devices_WiFiDirect_Services" = [ "Devices_WiFiDirect" ];
+          "Embedded" = [ "Foundation" ];
+          "Embedded_DeviceLockdown" = [ "Embedded" ];
+          "Foundation_Collections" = [ "Foundation" ];
+          "Foundation_Diagnostics" = [ "Foundation" ];
+          "Foundation_Metadata" = [ "Foundation" ];
+          "Foundation_Numerics" = [ "Foundation" ];
+          "Gaming" = [ "Foundation" ];
+          "Gaming_Input" = [ "Gaming" ];
+          "Gaming_Input_Custom" = [ "Gaming_Input" ];
+          "Gaming_Input_ForceFeedback" = [ "Gaming_Input" ];
+          "Gaming_Input_Preview" = [ "Gaming_Input" ];
+          "Gaming_Preview" = [ "Gaming" ];
+          "Gaming_Preview_GamesEnumeration" = [ "Gaming_Preview" ];
+          "Gaming_UI" = [ "Gaming" ];
+          "Gaming_XboxLive" = [ "Gaming" ];
+          "Gaming_XboxLive_Storage" = [ "Gaming_XboxLive" ];
+          "Globalization" = [ "Foundation" ];
+          "Globalization_Collation" = [ "Globalization" ];
+          "Globalization_DateTimeFormatting" = [ "Globalization" ];
+          "Globalization_Fonts" = [ "Globalization" ];
+          "Globalization_NumberFormatting" = [ "Globalization" ];
+          "Globalization_PhoneNumberFormatting" = [ "Globalization" ];
+          "Graphics" = [ "Foundation" ];
+          "Graphics_Capture" = [ "Graphics" ];
+          "Graphics_DirectX" = [ "Graphics" ];
+          "Graphics_DirectX_Direct3D11" = [ "Graphics_DirectX" ];
+          "Graphics_Display" = [ "Graphics" ];
+          "Graphics_Display_Core" = [ "Graphics_Display" ];
+          "Graphics_Effects" = [ "Graphics" ];
+          "Graphics_Holographic" = [ "Graphics" ];
+          "Graphics_Imaging" = [ "Graphics" ];
+          "Graphics_Printing" = [ "Graphics" ];
+          "Graphics_Printing3D" = [ "Graphics" ];
+          "Graphics_Printing_OptionDetails" = [ "Graphics_Printing" ];
+          "Graphics_Printing_PrintSupport" = [ "Graphics_Printing" ];
+          "Graphics_Printing_PrintTicket" = [ "Graphics_Printing" ];
+          "Graphics_Printing_Workflow" = [ "Graphics_Printing" ];
+          "Management" = [ "Foundation" ];
+          "Management_Core" = [ "Management" ];
+          "Management_Deployment" = [ "Management" ];
+          "Management_Deployment_Preview" = [ "Management_Deployment" ];
+          "Management_Policies" = [ "Management" ];
+          "Management_Update" = [ "Management" ];
+          "Management_Workplace" = [ "Management" ];
+          "Media" = [ "Foundation" ];
+          "Media_AppBroadcasting" = [ "Media" ];
+          "Media_AppRecording" = [ "Media" ];
+          "Media_Audio" = [ "Media" ];
+          "Media_Capture" = [ "Media" ];
+          "Media_Capture_Core" = [ "Media_Capture" ];
+          "Media_Capture_Frames" = [ "Media_Capture" ];
+          "Media_Casting" = [ "Media" ];
+          "Media_ClosedCaptioning" = [ "Media" ];
+          "Media_ContentRestrictions" = [ "Media" ];
+          "Media_Control" = [ "Media" ];
+          "Media_Core" = [ "Media" ];
+          "Media_Core_Preview" = [ "Media_Core" ];
+          "Media_Devices" = [ "Media" ];
+          "Media_Devices_Core" = [ "Media_Devices" ];
+          "Media_DialProtocol" = [ "Media" ];
+          "Media_Editing" = [ "Media" ];
+          "Media_Effects" = [ "Media" ];
+          "Media_FaceAnalysis" = [ "Media" ];
+          "Media_Import" = [ "Media" ];
+          "Media_MediaProperties" = [ "Media" ];
+          "Media_Miracast" = [ "Media" ];
+          "Media_Ocr" = [ "Media" ];
+          "Media_PlayTo" = [ "Media" ];
+          "Media_Playback" = [ "Media" ];
+          "Media_Playlists" = [ "Media" ];
+          "Media_Protection" = [ "Media" ];
+          "Media_Protection_PlayReady" = [ "Media_Protection" ];
+          "Media_Render" = [ "Media" ];
+          "Media_SpeechRecognition" = [ "Media" ];
+          "Media_SpeechSynthesis" = [ "Media" ];
+          "Media_Streaming" = [ "Media" ];
+          "Media_Streaming_Adaptive" = [ "Media_Streaming" ];
+          "Media_Transcoding" = [ "Media" ];
+          "Networking" = [ "Foundation" ];
+          "Networking_BackgroundTransfer" = [ "Networking" ];
+          "Networking_Connectivity" = [ "Networking" ];
+          "Networking_NetworkOperators" = [ "Networking" ];
+          "Networking_Proximity" = [ "Networking" ];
+          "Networking_PushNotifications" = [ "Networking" ];
+          "Networking_ServiceDiscovery" = [ "Networking" ];
+          "Networking_ServiceDiscovery_Dnssd" = [ "Networking_ServiceDiscovery" ];
+          "Networking_Sockets" = [ "Networking" ];
+          "Networking_Vpn" = [ "Networking" ];
+          "Networking_XboxLive" = [ "Networking" ];
+          "Perception" = [ "Foundation" ];
+          "Perception_Automation" = [ "Perception" ];
+          "Perception_Automation_Core" = [ "Perception_Automation" ];
+          "Perception_People" = [ "Perception" ];
+          "Perception_Spatial" = [ "Perception" ];
+          "Perception_Spatial_Preview" = [ "Perception_Spatial" ];
+          "Perception_Spatial_Surfaces" = [ "Perception_Spatial" ];
+          "Phone" = [ "Foundation" ];
+          "Phone_ApplicationModel" = [ "Phone" ];
+          "Phone_Devices" = [ "Phone" ];
+          "Phone_Devices_Notification" = [ "Phone_Devices" ];
+          "Phone_Devices_Power" = [ "Phone_Devices" ];
+          "Phone_Management" = [ "Phone" ];
+          "Phone_Management_Deployment" = [ "Phone_Management" ];
+          "Phone_Media" = [ "Phone" ];
+          "Phone_Media_Devices" = [ "Phone_Media" ];
+          "Phone_Notification" = [ "Phone" ];
+          "Phone_Notification_Management" = [ "Phone_Notification" ];
+          "Phone_PersonalInformation" = [ "Phone" ];
+          "Phone_PersonalInformation_Provisioning" = [ "Phone_PersonalInformation" ];
+          "Phone_Speech" = [ "Phone" ];
+          "Phone_Speech_Recognition" = [ "Phone_Speech" ];
+          "Phone_StartScreen" = [ "Phone" ];
+          "Phone_System" = [ "Phone" ];
+          "Phone_System_Power" = [ "Phone_System" ];
+          "Phone_System_Profile" = [ "Phone_System" ];
+          "Phone_System_UserProfile" = [ "Phone_System" ];
+          "Phone_System_UserProfile_GameServices" = [ "Phone_System_UserProfile" ];
+          "Phone_System_UserProfile_GameServices_Core" = [ "Phone_System_UserProfile_GameServices" ];
+          "Phone_UI" = [ "Phone" ];
+          "Phone_UI_Input" = [ "Phone_UI" ];
+          "Security" = [ "Foundation" ];
+          "Security_Authentication" = [ "Security" ];
+          "Security_Authentication_Identity" = [ "Security_Authentication" ];
+          "Security_Authentication_Identity_Core" = [ "Security_Authentication_Identity" ];
+          "Security_Authentication_OnlineId" = [ "Security_Authentication" ];
+          "Security_Authentication_Web" = [ "Security_Authentication" ];
+          "Security_Authentication_Web_Core" = [ "Security_Authentication_Web" ];
+          "Security_Authentication_Web_Provider" = [ "Security_Authentication_Web" ];
+          "Security_Authorization" = [ "Security" ];
+          "Security_Authorization_AppCapabilityAccess" = [ "Security_Authorization" ];
+          "Security_Credentials" = [ "Security" ];
+          "Security_Credentials_UI" = [ "Security_Credentials" ];
+          "Security_Cryptography" = [ "Security" ];
+          "Security_Cryptography_Certificates" = [ "Security_Cryptography" ];
+          "Security_Cryptography_Core" = [ "Security_Cryptography" ];
+          "Security_Cryptography_DataProtection" = [ "Security_Cryptography" ];
+          "Security_DataProtection" = [ "Security" ];
+          "Security_EnterpriseData" = [ "Security" ];
+          "Security_ExchangeActiveSyncProvisioning" = [ "Security" ];
+          "Security_Isolation" = [ "Security" ];
+          "Services" = [ "Foundation" ];
+          "Services_Maps" = [ "Services" ];
+          "Services_Maps_Guidance" = [ "Services_Maps" ];
+          "Services_Maps_LocalSearch" = [ "Services_Maps" ];
+          "Services_Maps_OfflineMaps" = [ "Services_Maps" ];
+          "Services_Store" = [ "Services" ];
+          "Services_TargetedContent" = [ "Services" ];
+          "Storage" = [ "Foundation" ];
+          "Storage_AccessCache" = [ "Storage" ];
+          "Storage_BulkAccess" = [ "Storage" ];
+          "Storage_Compression" = [ "Storage" ];
+          "Storage_FileProperties" = [ "Storage" ];
+          "Storage_Pickers" = [ "Storage" ];
+          "Storage_Pickers_Provider" = [ "Storage_Pickers" ];
+          "Storage_Provider" = [ "Storage" ];
+          "Storage_Search" = [ "Storage" ];
+          "Storage_Streams" = [ "Storage" ];
+          "System" = [ "Foundation" ];
+          "System_Diagnostics" = [ "System" ];
+          "System_Diagnostics_DevicePortal" = [ "System_Diagnostics" ];
+          "System_Diagnostics_Telemetry" = [ "System_Diagnostics" ];
+          "System_Diagnostics_TraceReporting" = [ "System_Diagnostics" ];
+          "System_Display" = [ "System" ];
+          "System_Implementation" = [ "System" ];
+          "System_Implementation_FileExplorer" = [ "System_Implementation" ];
+          "System_Inventory" = [ "System" ];
+          "System_Power" = [ "System" ];
+          "System_Profile" = [ "System" ];
+          "System_Profile_SystemManufacturers" = [ "System_Profile" ];
+          "System_RemoteDesktop" = [ "System" ];
+          "System_RemoteDesktop_Input" = [ "System_RemoteDesktop" ];
+          "System_RemoteDesktop_Provider" = [ "System_RemoteDesktop" ];
+          "System_RemoteSystems" = [ "System" ];
+          "System_Threading" = [ "System" ];
+          "System_Threading_Core" = [ "System_Threading" ];
+          "System_Update" = [ "System" ];
+          "System_UserProfile" = [ "System" ];
+          "UI" = [ "Foundation" ];
+          "UI_Accessibility" = [ "UI" ];
+          "UI_ApplicationSettings" = [ "UI" ];
+          "UI_Composition" = [ "UI" ];
+          "UI_Composition_Core" = [ "UI_Composition" ];
+          "UI_Composition_Desktop" = [ "UI_Composition" ];
+          "UI_Composition_Diagnostics" = [ "UI_Composition" ];
+          "UI_Composition_Effects" = [ "UI_Composition" ];
+          "UI_Composition_Interactions" = [ "UI_Composition" ];
+          "UI_Composition_Scenes" = [ "UI_Composition" ];
+          "UI_Core" = [ "UI" ];
+          "UI_Core_AnimationMetrics" = [ "UI_Core" ];
+          "UI_Core_Preview" = [ "UI_Core" ];
+          "UI_Input" = [ "UI" ];
+          "UI_Input_Core" = [ "UI_Input" ];
+          "UI_Input_Inking" = [ "UI_Input" ];
+          "UI_Input_Inking_Analysis" = [ "UI_Input_Inking" ];
+          "UI_Input_Inking_Core" = [ "UI_Input_Inking" ];
+          "UI_Input_Inking_Preview" = [ "UI_Input_Inking" ];
+          "UI_Input_Preview" = [ "UI_Input" ];
+          "UI_Input_Preview_Injection" = [ "UI_Input_Preview" ];
+          "UI_Input_Spatial" = [ "UI_Input" ];
+          "UI_Notifications" = [ "UI" ];
+          "UI_Notifications_Management" = [ "UI_Notifications" ];
+          "UI_Notifications_Preview" = [ "UI_Notifications" ];
+          "UI_Popups" = [ "UI" ];
+          "UI_Shell" = [ "UI" ];
+          "UI_StartScreen" = [ "UI" ];
+          "UI_Text" = [ "UI" ];
+          "UI_Text_Core" = [ "UI_Text" ];
+          "UI_UIAutomation" = [ "UI" ];
+          "UI_UIAutomation_Core" = [ "UI_UIAutomation" ];
+          "UI_ViewManagement" = [ "UI" ];
+          "UI_ViewManagement_Core" = [ "UI_ViewManagement" ];
+          "UI_WebUI" = [ "UI" ];
+          "UI_WebUI_Core" = [ "UI_WebUI" ];
+          "UI_WindowManagement" = [ "UI" ];
+          "UI_WindowManagement_Preview" = [ "UI_WindowManagement" ];
+          "Wdk" = [ "Win32_Foundation" ];
+          "Wdk_Devices" = [ "Wdk" ];
+          "Wdk_Devices_HumanInterfaceDevice" = [ "Wdk_Devices" ];
+          "Wdk_Foundation" = [ "Wdk" ];
+          "Wdk_Graphics" = [ "Wdk" ];
+          "Wdk_Graphics_Direct3D" = [ "Wdk_Graphics" ];
+          "Wdk_NetworkManagement" = [ "Wdk" ];
+          "Wdk_NetworkManagement_Ndis" = [ "Wdk_NetworkManagement" ];
+          "Wdk_NetworkManagement_WindowsFilteringPlatform" = [ "Wdk_NetworkManagement" ];
+          "Wdk_Storage" = [ "Wdk" ];
+          "Wdk_Storage_FileSystem" = [ "Wdk_Storage" ];
+          "Wdk_Storage_FileSystem_Minifilters" = [ "Wdk_Storage_FileSystem" ];
+          "Wdk_System" = [ "Wdk" ];
+          "Wdk_System_IO" = [ "Wdk_System" ];
+          "Wdk_System_OfflineRegistry" = [ "Wdk_System" ];
+          "Wdk_System_Registry" = [ "Wdk_System" ];
+          "Wdk_System_SystemInformation" = [ "Wdk_System" ];
+          "Wdk_System_SystemServices" = [ "Wdk_System" ];
+          "Wdk_System_Threading" = [ "Wdk_System" ];
+          "Web" = [ "Foundation" ];
+          "Web_AtomPub" = [ "Web" ];
+          "Web_Http" = [ "Web" ];
+          "Web_Http_Diagnostics" = [ "Web_Http" ];
+          "Web_Http_Filters" = [ "Web_Http" ];
+          "Web_Http_Headers" = [ "Web_Http" ];
+          "Web_Syndication" = [ "Web" ];
+          "Web_UI" = [ "Web" ];
+          "Web_UI_Interop" = [ "Web_UI" ];
+          "Win32" = [ "Win32_Foundation" ];
+          "Win32_AI" = [ "Win32" ];
+          "Win32_AI_MachineLearning" = [ "Win32_AI" ];
+          "Win32_AI_MachineLearning_DirectML" = [ "Win32_AI_MachineLearning" ];
+          "Win32_AI_MachineLearning_WinML" = [ "Win32_AI_MachineLearning" ];
+          "Win32_Data" = [ "Win32" ];
+          "Win32_Data_HtmlHelp" = [ "Win32_Data" ];
+          "Win32_Data_RightsManagement" = [ "Win32_Data" ];
+          "Win32_Data_Xml" = [ "Win32_Data" ];
+          "Win32_Data_Xml_MsXml" = [ "Win32_Data_Xml" ];
+          "Win32_Data_Xml_XmlLite" = [ "Win32_Data_Xml" ];
+          "Win32_Devices" = [ "Win32" ];
+          "Win32_Devices_AllJoyn" = [ "Win32_Devices" ];
+          "Win32_Devices_BiometricFramework" = [ "Win32_Devices" ];
+          "Win32_Devices_Bluetooth" = [ "Win32_Devices" ];
+          "Win32_Devices_Communication" = [ "Win32_Devices" ];
+          "Win32_Devices_DeviceAccess" = [ "Win32_Devices" ];
+          "Win32_Devices_DeviceAndDriverInstallation" = [ "Win32_Devices" ];
+          "Win32_Devices_DeviceQuery" = [ "Win32_Devices" ];
+          "Win32_Devices_Display" = [ "Win32_Devices" ];
+          "Win32_Devices_Enumeration" = [ "Win32_Devices" ];
+          "Win32_Devices_Enumeration_Pnp" = [ "Win32_Devices_Enumeration" ];
+          "Win32_Devices_Fax" = [ "Win32_Devices" ];
+          "Win32_Devices_FunctionDiscovery" = [ "Win32_Devices" ];
+          "Win32_Devices_Geolocation" = [ "Win32_Devices" ];
+          "Win32_Devices_HumanInterfaceDevice" = [ "Win32_Devices" ];
+          "Win32_Devices_ImageAcquisition" = [ "Win32_Devices" ];
+          "Win32_Devices_PortableDevices" = [ "Win32_Devices" ];
+          "Win32_Devices_Properties" = [ "Win32_Devices" ];
+          "Win32_Devices_Pwm" = [ "Win32_Devices" ];
+          "Win32_Devices_Sensors" = [ "Win32_Devices" ];
+          "Win32_Devices_SerialCommunication" = [ "Win32_Devices" ];
+          "Win32_Devices_Tapi" = [ "Win32_Devices" ];
+          "Win32_Devices_Usb" = [ "Win32_Devices" ];
+          "Win32_Devices_WebServicesOnDevices" = [ "Win32_Devices" ];
+          "Win32_Foundation" = [ "Win32" ];
+          "Win32_Gaming" = [ "Win32" ];
+          "Win32_Globalization" = [ "Win32" ];
+          "Win32_Graphics" = [ "Win32" ];
+          "Win32_Graphics_CompositionSwapchain" = [ "Win32_Graphics" ];
+          "Win32_Graphics_DXCore" = [ "Win32_Graphics" ];
+          "Win32_Graphics_Direct2D" = [ "Win32_Graphics" ];
+          "Win32_Graphics_Direct2D_Common" = [ "Win32_Graphics_Direct2D" ];
+          "Win32_Graphics_Direct3D" = [ "Win32_Graphics" ];
+          "Win32_Graphics_Direct3D10" = [ "Win32_Graphics" ];
+          "Win32_Graphics_Direct3D11" = [ "Win32_Graphics" ];
+          "Win32_Graphics_Direct3D11on12" = [ "Win32_Graphics" ];
+          "Win32_Graphics_Direct3D12" = [ "Win32_Graphics" ];
+          "Win32_Graphics_Direct3D9" = [ "Win32_Graphics" ];
+          "Win32_Graphics_Direct3D9on12" = [ "Win32_Graphics" ];
+          "Win32_Graphics_Direct3D_Dxc" = [ "Win32_Graphics_Direct3D" ];
+          "Win32_Graphics_Direct3D_Fxc" = [ "Win32_Graphics_Direct3D" ];
+          "Win32_Graphics_DirectComposition" = [ "Win32_Graphics" ];
+          "Win32_Graphics_DirectDraw" = [ "Win32_Graphics" ];
+          "Win32_Graphics_DirectManipulation" = [ "Win32_Graphics" ];
+          "Win32_Graphics_DirectWrite" = [ "Win32_Graphics" ];
+          "Win32_Graphics_Dwm" = [ "Win32_Graphics" ];
+          "Win32_Graphics_Dxgi" = [ "Win32_Graphics" ];
+          "Win32_Graphics_Dxgi_Common" = [ "Win32_Graphics_Dxgi" ];
+          "Win32_Graphics_Gdi" = [ "Win32_Graphics" ];
+          "Win32_Graphics_GdiPlus" = [ "Win32_Graphics" ];
+          "Win32_Graphics_Hlsl" = [ "Win32_Graphics" ];
+          "Win32_Graphics_Imaging" = [ "Win32_Graphics" ];
+          "Win32_Graphics_Imaging_D2D" = [ "Win32_Graphics_Imaging" ];
+          "Win32_Graphics_OpenGL" = [ "Win32_Graphics" ];
+          "Win32_Graphics_Printing" = [ "Win32_Graphics" ];
+          "Win32_Graphics_Printing_PrintTicket" = [ "Win32_Graphics_Printing" ];
+          "Win32_Management" = [ "Win32" ];
+          "Win32_Management_MobileDeviceManagementRegistration" = [ "Win32_Management" ];
+          "Win32_Media" = [ "Win32" ];
+          "Win32_Media_Audio" = [ "Win32_Media" ];
+          "Win32_Media_Audio_Apo" = [ "Win32_Media_Audio" ];
+          "Win32_Media_Audio_DirectMusic" = [ "Win32_Media_Audio" ];
+          "Win32_Media_Audio_DirectSound" = [ "Win32_Media_Audio" ];
+          "Win32_Media_Audio_Endpoints" = [ "Win32_Media_Audio" ];
+          "Win32_Media_Audio_XAudio2" = [ "Win32_Media_Audio" ];
+          "Win32_Media_DeviceManager" = [ "Win32_Media" ];
+          "Win32_Media_DirectShow" = [ "Win32_Media" ];
+          "Win32_Media_DirectShow_Tv" = [ "Win32_Media_DirectShow" ];
+          "Win32_Media_DirectShow_Xml" = [ "Win32_Media_DirectShow" ];
+          "Win32_Media_DxMediaObjects" = [ "Win32_Media" ];
+          "Win32_Media_KernelStreaming" = [ "Win32_Media" ];
+          "Win32_Media_LibrarySharingServices" = [ "Win32_Media" ];
+          "Win32_Media_MediaFoundation" = [ "Win32_Media" ];
+          "Win32_Media_MediaPlayer" = [ "Win32_Media" ];
+          "Win32_Media_Multimedia" = [ "Win32_Media" ];
+          "Win32_Media_PictureAcquisition" = [ "Win32_Media" ];
+          "Win32_Media_Speech" = [ "Win32_Media" ];
+          "Win32_Media_Streaming" = [ "Win32_Media" ];
+          "Win32_Media_WindowsMediaFormat" = [ "Win32_Media" ];
+          "Win32_NetworkManagement" = [ "Win32" ];
+          "Win32_NetworkManagement_Dhcp" = [ "Win32_NetworkManagement" ];
+          "Win32_NetworkManagement_Dns" = [ "Win32_NetworkManagement" ];
+          "Win32_NetworkManagement_InternetConnectionWizard" = [ "Win32_NetworkManagement" ];
+          "Win32_NetworkManagement_IpHelper" = [ "Win32_NetworkManagement" ];
+          "Win32_NetworkManagement_MobileBroadband" = [ "Win32_NetworkManagement" ];
+          "Win32_NetworkManagement_Multicast" = [ "Win32_NetworkManagement" ];
+          "Win32_NetworkManagement_Ndis" = [ "Win32_NetworkManagement" ];
+          "Win32_NetworkManagement_NetBios" = [ "Win32_NetworkManagement" ];
+          "Win32_NetworkManagement_NetManagement" = [ "Win32_NetworkManagement" ];
+          "Win32_NetworkManagement_NetShell" = [ "Win32_NetworkManagement" ];
+          "Win32_NetworkManagement_NetworkDiagnosticsFramework" = [ "Win32_NetworkManagement" ];
+          "Win32_NetworkManagement_NetworkPolicyServer" = [ "Win32_NetworkManagement" ];
+          "Win32_NetworkManagement_P2P" = [ "Win32_NetworkManagement" ];
+          "Win32_NetworkManagement_QoS" = [ "Win32_NetworkManagement" ];
+          "Win32_NetworkManagement_Rras" = [ "Win32_NetworkManagement" ];
+          "Win32_NetworkManagement_Snmp" = [ "Win32_NetworkManagement" ];
+          "Win32_NetworkManagement_WNet" = [ "Win32_NetworkManagement" ];
+          "Win32_NetworkManagement_WebDav" = [ "Win32_NetworkManagement" ];
+          "Win32_NetworkManagement_WiFi" = [ "Win32_NetworkManagement" ];
+          "Win32_NetworkManagement_WindowsConnectNow" = [ "Win32_NetworkManagement" ];
+          "Win32_NetworkManagement_WindowsConnectionManager" = [ "Win32_NetworkManagement" ];
+          "Win32_NetworkManagement_WindowsFilteringPlatform" = [ "Win32_NetworkManagement" ];
+          "Win32_NetworkManagement_WindowsFirewall" = [ "Win32_NetworkManagement" ];
+          "Win32_NetworkManagement_WindowsNetworkVirtualization" = [ "Win32_NetworkManagement" ];
+          "Win32_Networking" = [ "Win32" ];
+          "Win32_Networking_ActiveDirectory" = [ "Win32_Networking" ];
+          "Win32_Networking_BackgroundIntelligentTransferService" = [ "Win32_Networking" ];
+          "Win32_Networking_Clustering" = [ "Win32_Networking" ];
+          "Win32_Networking_HttpServer" = [ "Win32_Networking" ];
+          "Win32_Networking_Ldap" = [ "Win32_Networking" ];
+          "Win32_Networking_NetworkListManager" = [ "Win32_Networking" ];
+          "Win32_Networking_RemoteDifferentialCompression" = [ "Win32_Networking" ];
+          "Win32_Networking_WebSocket" = [ "Win32_Networking" ];
+          "Win32_Networking_WinHttp" = [ "Win32_Networking" ];
+          "Win32_Networking_WinInet" = [ "Win32_Networking" ];
+          "Win32_Networking_WinSock" = [ "Win32_Networking" ];
+          "Win32_Networking_WindowsWebServices" = [ "Win32_Networking" ];
+          "Win32_Security" = [ "Win32" ];
+          "Win32_Security_AppLocker" = [ "Win32_Security" ];
+          "Win32_Security_Authentication" = [ "Win32_Security" ];
+          "Win32_Security_Authentication_Identity" = [ "Win32_Security_Authentication" ];
+          "Win32_Security_Authentication_Identity_Provider" = [ "Win32_Security_Authentication_Identity" ];
+          "Win32_Security_Authorization" = [ "Win32_Security" ];
+          "Win32_Security_Authorization_UI" = [ "Win32_Security_Authorization" ];
+          "Win32_Security_ConfigurationSnapin" = [ "Win32_Security" ];
+          "Win32_Security_Credentials" = [ "Win32_Security" ];
+          "Win32_Security_Cryptography" = [ "Win32_Security" ];
+          "Win32_Security_Cryptography_Catalog" = [ "Win32_Security_Cryptography" ];
+          "Win32_Security_Cryptography_Certificates" = [ "Win32_Security_Cryptography" ];
+          "Win32_Security_Cryptography_Sip" = [ "Win32_Security_Cryptography" ];
+          "Win32_Security_Cryptography_UI" = [ "Win32_Security_Cryptography" ];
+          "Win32_Security_DiagnosticDataQuery" = [ "Win32_Security" ];
+          "Win32_Security_DirectoryServices" = [ "Win32_Security" ];
+          "Win32_Security_EnterpriseData" = [ "Win32_Security" ];
+          "Win32_Security_ExtensibleAuthenticationProtocol" = [ "Win32_Security" ];
+          "Win32_Security_Isolation" = [ "Win32_Security" ];
+          "Win32_Security_LicenseProtection" = [ "Win32_Security" ];
+          "Win32_Security_NetworkAccessProtection" = [ "Win32_Security" ];
+          "Win32_Security_Tpm" = [ "Win32_Security" ];
+          "Win32_Security_WinTrust" = [ "Win32_Security" ];
+          "Win32_Security_WinWlx" = [ "Win32_Security" ];
+          "Win32_Storage" = [ "Win32" ];
+          "Win32_Storage_Cabinets" = [ "Win32_Storage" ];
+          "Win32_Storage_CloudFilters" = [ "Win32_Storage" ];
+          "Win32_Storage_Compression" = [ "Win32_Storage" ];
+          "Win32_Storage_DataDeduplication" = [ "Win32_Storage" ];
+          "Win32_Storage_DistributedFileSystem" = [ "Win32_Storage" ];
+          "Win32_Storage_EnhancedStorage" = [ "Win32_Storage" ];
+          "Win32_Storage_FileHistory" = [ "Win32_Storage" ];
+          "Win32_Storage_FileServerResourceManager" = [ "Win32_Storage" ];
+          "Win32_Storage_FileSystem" = [ "Win32_Storage" ];
+          "Win32_Storage_Imapi" = [ "Win32_Storage" ];
+          "Win32_Storage_IndexServer" = [ "Win32_Storage" ];
+          "Win32_Storage_InstallableFileSystems" = [ "Win32_Storage" ];
+          "Win32_Storage_IscsiDisc" = [ "Win32_Storage" ];
+          "Win32_Storage_Jet" = [ "Win32_Storage" ];
+          "Win32_Storage_Nvme" = [ "Win32_Storage" ];
+          "Win32_Storage_OfflineFiles" = [ "Win32_Storage" ];
+          "Win32_Storage_OperationRecorder" = [ "Win32_Storage" ];
+          "Win32_Storage_Packaging" = [ "Win32_Storage" ];
+          "Win32_Storage_Packaging_Appx" = [ "Win32_Storage_Packaging" ];
+          "Win32_Storage_Packaging_Opc" = [ "Win32_Storage_Packaging" ];
+          "Win32_Storage_ProjectedFileSystem" = [ "Win32_Storage" ];
+          "Win32_Storage_StructuredStorage" = [ "Win32_Storage" ];
+          "Win32_Storage_Vhd" = [ "Win32_Storage" ];
+          "Win32_Storage_VirtualDiskService" = [ "Win32_Storage" ];
+          "Win32_Storage_Vss" = [ "Win32_Storage" ];
+          "Win32_Storage_Xps" = [ "Win32_Storage" ];
+          "Win32_Storage_Xps_Printing" = [ "Win32_Storage_Xps" ];
+          "Win32_System" = [ "Win32" ];
+          "Win32_System_AddressBook" = [ "Win32_System" ];
+          "Win32_System_Antimalware" = [ "Win32_System" ];
+          "Win32_System_ApplicationInstallationAndServicing" = [ "Win32_System" ];
+          "Win32_System_ApplicationVerifier" = [ "Win32_System" ];
+          "Win32_System_AssessmentTool" = [ "Win32_System" ];
+          "Win32_System_ClrHosting" = [ "Win32_System" ];
+          "Win32_System_Com" = [ "Win32_System" ];
+          "Win32_System_Com_CallObj" = [ "Win32_System_Com" ];
+          "Win32_System_Com_ChannelCredentials" = [ "Win32_System_Com" ];
+          "Win32_System_Com_Events" = [ "Win32_System_Com" ];
+          "Win32_System_Com_Marshal" = [ "Win32_System_Com" ];
+          "Win32_System_Com_StructuredStorage" = [ "Win32_System_Com" ];
+          "Win32_System_Com_UI" = [ "Win32_System_Com" ];
+          "Win32_System_Com_Urlmon" = [ "Win32_System_Com" ];
+          "Win32_System_ComponentServices" = [ "Win32_System" ];
+          "Win32_System_Console" = [ "Win32_System" ];
+          "Win32_System_Contacts" = [ "Win32_System" ];
+          "Win32_System_CorrelationVector" = [ "Win32_System" ];
+          "Win32_System_DataExchange" = [ "Win32_System" ];
+          "Win32_System_DeploymentServices" = [ "Win32_System" ];
+          "Win32_System_DesktopSharing" = [ "Win32_System" ];
+          "Win32_System_DeveloperLicensing" = [ "Win32_System" ];
+          "Win32_System_Diagnostics" = [ "Win32_System" ];
+          "Win32_System_Diagnostics_Ceip" = [ "Win32_System_Diagnostics" ];
+          "Win32_System_Diagnostics_ClrProfiling" = [ "Win32_System_Diagnostics" ];
+          "Win32_System_Diagnostics_Debug" = [ "Win32_System_Diagnostics" ];
+          "Win32_System_Diagnostics_Debug_ActiveScript" = [ "Win32_System_Diagnostics_Debug" ];
+          "Win32_System_Diagnostics_Debug_Extensions" = [ "Win32_System_Diagnostics_Debug" ];
+          "Win32_System_Diagnostics_Etw" = [ "Win32_System_Diagnostics" ];
+          "Win32_System_Diagnostics_ProcessSnapshotting" = [ "Win32_System_Diagnostics" ];
+          "Win32_System_Diagnostics_ToolHelp" = [ "Win32_System_Diagnostics" ];
+          "Win32_System_Diagnostics_TraceLogging" = [ "Win32_System_Diagnostics" ];
+          "Win32_System_DistributedTransactionCoordinator" = [ "Win32_System" ];
+          "Win32_System_Environment" = [ "Win32_System" ];
+          "Win32_System_ErrorReporting" = [ "Win32_System" ];
+          "Win32_System_EventCollector" = [ "Win32_System" ];
+          "Win32_System_EventLog" = [ "Win32_System" ];
+          "Win32_System_EventNotificationService" = [ "Win32_System" ];
+          "Win32_System_GroupPolicy" = [ "Win32_System" ];
+          "Win32_System_HostCompute" = [ "Win32_System" ];
+          "Win32_System_HostComputeNetwork" = [ "Win32_System" ];
+          "Win32_System_HostComputeSystem" = [ "Win32_System" ];
+          "Win32_System_Hypervisor" = [ "Win32_System" ];
+          "Win32_System_IO" = [ "Win32_System" ];
+          "Win32_System_Iis" = [ "Win32_System" ];
+          "Win32_System_Ioctl" = [ "Win32_System" ];
+          "Win32_System_JobObjects" = [ "Win32_System" ];
+          "Win32_System_Js" = [ "Win32_System" ];
+          "Win32_System_Kernel" = [ "Win32_System" ];
+          "Win32_System_LibraryLoader" = [ "Win32_System" ];
+          "Win32_System_Mailslots" = [ "Win32_System" ];
+          "Win32_System_Mapi" = [ "Win32_System" ];
+          "Win32_System_Memory" = [ "Win32_System" ];
+          "Win32_System_Memory_NonVolatile" = [ "Win32_System_Memory" ];
+          "Win32_System_MessageQueuing" = [ "Win32_System" ];
+          "Win32_System_MixedReality" = [ "Win32_System" ];
+          "Win32_System_Mmc" = [ "Win32_System" ];
+          "Win32_System_Ole" = [ "Win32_System" ];
+          "Win32_System_ParentalControls" = [ "Win32_System" ];
+          "Win32_System_PasswordManagement" = [ "Win32_System" ];
+          "Win32_System_Performance" = [ "Win32_System" ];
+          "Win32_System_Performance_HardwareCounterProfiling" = [ "Win32_System_Performance" ];
+          "Win32_System_Pipes" = [ "Win32_System" ];
+          "Win32_System_Power" = [ "Win32_System" ];
+          "Win32_System_ProcessStatus" = [ "Win32_System" ];
+          "Win32_System_RealTimeCommunications" = [ "Win32_System" ];
+          "Win32_System_Recovery" = [ "Win32_System" ];
+          "Win32_System_Registry" = [ "Win32_System" ];
+          "Win32_System_RemoteAssistance" = [ "Win32_System" ];
+          "Win32_System_RemoteDesktop" = [ "Win32_System" ];
+          "Win32_System_RemoteManagement" = [ "Win32_System" ];
+          "Win32_System_RestartManager" = [ "Win32_System" ];
+          "Win32_System_Restore" = [ "Win32_System" ];
+          "Win32_System_Rpc" = [ "Win32_System" ];
+          "Win32_System_Search" = [ "Win32_System" ];
+          "Win32_System_Search_Common" = [ "Win32_System_Search" ];
+          "Win32_System_SecurityCenter" = [ "Win32_System" ];
+          "Win32_System_ServerBackup" = [ "Win32_System" ];
+          "Win32_System_Services" = [ "Win32_System" ];
+          "Win32_System_SettingsManagementInfrastructure" = [ "Win32_System" ];
+          "Win32_System_SetupAndMigration" = [ "Win32_System" ];
+          "Win32_System_Shutdown" = [ "Win32_System" ];
+          "Win32_System_SideShow" = [ "Win32_System" ];
+          "Win32_System_StationsAndDesktops" = [ "Win32_System" ];
+          "Win32_System_SubsystemForLinux" = [ "Win32_System" ];
+          "Win32_System_SystemInformation" = [ "Win32_System" ];
+          "Win32_System_SystemServices" = [ "Win32_System" ];
+          "Win32_System_TaskScheduler" = [ "Win32_System" ];
+          "Win32_System_Threading" = [ "Win32_System" ];
+          "Win32_System_Time" = [ "Win32_System" ];
+          "Win32_System_TpmBaseServices" = [ "Win32_System" ];
+          "Win32_System_TransactionServer" = [ "Win32_System" ];
+          "Win32_System_UpdateAgent" = [ "Win32_System" ];
+          "Win32_System_UpdateAssessment" = [ "Win32_System" ];
+          "Win32_System_UserAccessLogging" = [ "Win32_System" ];
+          "Win32_System_Variant" = [ "Win32_System" ];
+          "Win32_System_VirtualDosMachines" = [ "Win32_System" ];
+          "Win32_System_WinRT" = [ "Win32_System" ];
+          "Win32_System_WinRT_AllJoyn" = [ "Win32_System_WinRT" ];
+          "Win32_System_WinRT_Composition" = [ "Win32_System_WinRT" ];
+          "Win32_System_WinRT_CoreInputView" = [ "Win32_System_WinRT" ];
+          "Win32_System_WinRT_Direct3D11" = [ "Win32_System_WinRT" ];
+          "Win32_System_WinRT_Display" = [ "Win32_System_WinRT" ];
+          "Win32_System_WinRT_Graphics" = [ "Win32_System_WinRT" ];
+          "Win32_System_WinRT_Graphics_Capture" = [ "Win32_System_WinRT_Graphics" ];
+          "Win32_System_WinRT_Graphics_Direct2D" = [ "Win32_System_WinRT_Graphics" ];
+          "Win32_System_WinRT_Graphics_Imaging" = [ "Win32_System_WinRT_Graphics" ];
+          "Win32_System_WinRT_Holographic" = [ "Win32_System_WinRT" ];
+          "Win32_System_WinRT_Isolation" = [ "Win32_System_WinRT" ];
+          "Win32_System_WinRT_ML" = [ "Win32_System_WinRT" ];
+          "Win32_System_WinRT_Media" = [ "Win32_System_WinRT" ];
+          "Win32_System_WinRT_Metadata" = [ "Win32_System_WinRT" ];
+          "Win32_System_WinRT_Pdf" = [ "Win32_System_WinRT" ];
+          "Win32_System_WinRT_Printing" = [ "Win32_System_WinRT" ];
+          "Win32_System_WinRT_Shell" = [ "Win32_System_WinRT" ];
+          "Win32_System_WinRT_Storage" = [ "Win32_System_WinRT" ];
+          "Win32_System_WindowsProgramming" = [ "Win32_System" ];
+          "Win32_System_WindowsSync" = [ "Win32_System" ];
+          "Win32_System_Wmi" = [ "Win32_System" ];
+          "Win32_UI" = [ "Win32" ];
+          "Win32_UI_Accessibility" = [ "Win32_UI" ];
+          "Win32_UI_Animation" = [ "Win32_UI" ];
+          "Win32_UI_ColorSystem" = [ "Win32_UI" ];
+          "Win32_UI_Controls" = [ "Win32_UI" ];
+          "Win32_UI_Controls_Dialogs" = [ "Win32_UI_Controls" ];
+          "Win32_UI_Controls_RichEdit" = [ "Win32_UI_Controls" ];
+          "Win32_UI_HiDpi" = [ "Win32_UI" ];
+          "Win32_UI_Input" = [ "Win32_UI" ];
+          "Win32_UI_Input_Ime" = [ "Win32_UI_Input" ];
+          "Win32_UI_Input_Ink" = [ "Win32_UI_Input" ];
+          "Win32_UI_Input_KeyboardAndMouse" = [ "Win32_UI_Input" ];
+          "Win32_UI_Input_Pointer" = [ "Win32_UI_Input" ];
+          "Win32_UI_Input_Radial" = [ "Win32_UI_Input" ];
+          "Win32_UI_Input_Touch" = [ "Win32_UI_Input" ];
+          "Win32_UI_Input_XboxController" = [ "Win32_UI_Input" ];
+          "Win32_UI_InteractionContext" = [ "Win32_UI" ];
+          "Win32_UI_LegacyWindowsEnvironmentFeatures" = [ "Win32_UI" ];
+          "Win32_UI_Magnification" = [ "Win32_UI" ];
+          "Win32_UI_Notifications" = [ "Win32_UI" ];
+          "Win32_UI_Ribbon" = [ "Win32_UI" ];
+          "Win32_UI_Shell" = [ "Win32_UI" ];
+          "Win32_UI_Shell_Common" = [ "Win32_UI_Shell" ];
+          "Win32_UI_Shell_PropertiesSystem" = [ "Win32_UI_Shell" ];
+          "Win32_UI_TabletPC" = [ "Win32_UI" ];
+          "Win32_UI_TextServices" = [ "Win32_UI" ];
+          "Win32_UI_WindowsAndMessaging" = [ "Win32_UI" ];
+          "Win32_UI_Wpf" = [ "Win32_UI" ];
+          "Win32_Web" = [ "Win32" ];
+          "Win32_Web_InternetExplorer" = [ "Win32_Web" ];
+          "implement" = [ "windows-implement" "windows-interface" "windows-core/implement" ];
+          "windows-implement" = [ "dep:windows-implement" ];
+          "windows-interface" = [ "dep:windows-interface" ];
+        };
+        resolvedDefaultFeatures = [ "Win32" "Win32_Foundation" "Win32_System" "Win32_System_Diagnostics" "Win32_System_Diagnostics_Debug" "Win32_System_Kernel" "Win32_System_Memory" "Win32_System_SystemInformation" "default" ];
+      };
+      "windows-core 0.52.0" = rec {
         crateName = "windows-core";
         version = "0.52.0";
         edition = "2021";
@@ -16032,12 +16705,53 @@ rec {
         dependencies = [
           {
             name = "windows-targets";
-            packageId = "windows-targets 0.52.0";
+            packageId = "windows-targets 0.52.5";
           }
         ];
         features = { };
         resolvedDefaultFeatures = [ "default" ];
       };
+      "windows-core 0.54.0" = rec {
+        crateName = "windows-core";
+        version = "0.54.0";
+        edition = "2021";
+        sha256 = "0r8x2sgl4qq1h23ldf4z7cj213k0bz7479m8a156h79mi6f1nrhj";
+        authors = [
+          "Microsoft"
+        ];
+        dependencies = [
+          {
+            name = "windows-result";
+            packageId = "windows-result";
+          }
+          {
+            name = "windows-targets";
+            packageId = "windows-targets 0.52.5";
+          }
+        ];
+        features = { };
+        resolvedDefaultFeatures = [ "default" ];
+      };
+      "windows-result" = rec {
+        crateName = "windows-result";
+        version = "0.1.2";
+        edition = "2021";
+        sha256 = "1y274q1v0vy21lhkgslpxpq1m08hvr1mcs2l88h1b1gcx0136f2y";
+        libName = "windows_result";
+        authors = [
+          "Microsoft"
+        ];
+        dependencies = [
+          {
+            name = "windows-targets";
+            packageId = "windows-targets 0.52.5";
+          }
+        ];
+        features = {
+          "default" = [ "std" ];
+        };
+        resolvedDefaultFeatures = [ "default" "std" ];
+      };
       "windows-sys 0.48.0" = rec {
         crateName = "windows-sys";
         version = "0.48.0";
@@ -16342,7 +17056,7 @@ rec {
         dependencies = [
           {
             name = "windows-targets";
-            packageId = "windows-targets 0.52.0";
+            packageId = "windows-targets 0.52.5";
           }
         ];
         features = {
@@ -16576,7 +17290,7 @@ rec {
           "Win32_Web" = [ "Win32" ];
           "Win32_Web_InternetExplorer" = [ "Win32_Web" ];
         };
-        resolvedDefaultFeatures = [ "Wdk" "Wdk_Foundation" "Wdk_Storage" "Wdk_Storage_FileSystem" "Win32" "Win32_Foundation" "Win32_NetworkManagement" "Win32_NetworkManagement_IpHelper" "Win32_Networking" "Win32_Networking_WinSock" "Win32_Security" "Win32_Security_Authentication" "Win32_Security_Authentication_Identity" "Win32_Security_Credentials" "Win32_Security_Cryptography" "Win32_Storage" "Win32_Storage_FileSystem" "Win32_System" "Win32_System_Com" "Win32_System_Console" "Win32_System_Diagnostics" "Win32_System_Diagnostics_Debug" "Win32_System_IO" "Win32_System_LibraryLoader" "Win32_System_Memory" "Win32_System_Threading" "Win32_System_WindowsProgramming" "Win32_UI" "Win32_UI_Shell" "default" ];
+        resolvedDefaultFeatures = [ "Wdk" "Wdk_Foundation" "Wdk_Storage" "Wdk_Storage_FileSystem" "Win32" "Win32_Foundation" "Win32_NetworkManagement" "Win32_NetworkManagement_IpHelper" "Win32_Networking" "Win32_Networking_WinSock" "Win32_Security" "Win32_Security_Authentication" "Win32_Security_Authentication_Identity" "Win32_Security_Credentials" "Win32_Security_Cryptography" "Win32_Storage" "Win32_Storage_FileSystem" "Win32_System" "Win32_System_Com" "Win32_System_Console" "Win32_System_Diagnostics" "Win32_System_Diagnostics_Debug" "Win32_System_IO" "Win32_System_LibraryLoader" "Win32_System_Memory" "Win32_System_Threading" "Win32_System_WindowsProgramming" "Win32_UI" "Win32_UI_Input" "Win32_UI_Input_KeyboardAndMouse" "Win32_UI_Shell" "default" ];
       };
       "windows-targets 0.48.5" = rec {
         crateName = "windows-targets";
@@ -16625,49 +17339,54 @@ rec {
         ];
 
       };
-      "windows-targets 0.52.0" = rec {
+      "windows-targets 0.52.5" = rec {
         crateName = "windows-targets";
-        version = "0.52.0";
+        version = "0.52.5";
         edition = "2021";
-        sha256 = "1kg7a27ynzw8zz3krdgy6w5gbqcji27j1sz4p7xk2j5j8082064a";
+        sha256 = "1sz7jrnkygmmlj1ia8fk85wbyil450kq5qkh5qh9sh2rcnj161vg";
         authors = [
           "Microsoft"
         ];
         dependencies = [
           {
             name = "windows_aarch64_gnullvm";
-            packageId = "windows_aarch64_gnullvm 0.52.0";
+            packageId = "windows_aarch64_gnullvm 0.52.5";
             target = { target, features }: (pkgs.rust.lib.toRustTarget stdenv.hostPlatform == "aarch64-pc-windows-gnullvm");
           }
           {
             name = "windows_aarch64_msvc";
-            packageId = "windows_aarch64_msvc 0.52.0";
+            packageId = "windows_aarch64_msvc 0.52.5";
             target = { target, features }: (("aarch64" == target."arch" or null) && ("msvc" == target."env" or null) && (!(target."windows_raw_dylib" or false)));
           }
           {
             name = "windows_i686_gnu";
-            packageId = "windows_i686_gnu 0.52.0";
-            target = { target, features }: (("x86" == target."arch" or null) && ("gnu" == target."env" or null) && (!(target."windows_raw_dylib" or false)));
+            packageId = "windows_i686_gnu 0.52.5";
+            target = { target, features }: (("x86" == target."arch" or null) && ("gnu" == target."env" or null) && (!("llvm" == target."abi" or null)) && (!(target."windows_raw_dylib" or false)));
+          }
+          {
+            name = "windows_i686_gnullvm";
+            packageId = "windows_i686_gnullvm";
+            target = { target, features }: (pkgs.rust.lib.toRustTarget stdenv.hostPlatform == "i686-pc-windows-gnullvm");
           }
           {
             name = "windows_i686_msvc";
-            packageId = "windows_i686_msvc 0.52.0";
+            packageId = "windows_i686_msvc 0.52.5";
             target = { target, features }: (("x86" == target."arch" or null) && ("msvc" == target."env" or null) && (!(target."windows_raw_dylib" or false)));
           }
           {
             name = "windows_x86_64_gnu";
-            packageId = "windows_x86_64_gnu 0.52.0";
+            packageId = "windows_x86_64_gnu 0.52.5";
             target = { target, features }: (("x86_64" == target."arch" or null) && ("gnu" == target."env" or null) && (!("llvm" == target."abi" or null)) && (!(target."windows_raw_dylib" or false)));
           }
           {
             name = "windows_x86_64_gnullvm";
-            packageId = "windows_x86_64_gnullvm 0.52.0";
+            packageId = "windows_x86_64_gnullvm 0.52.5";
             target = { target, features }: (pkgs.rust.lib.toRustTarget stdenv.hostPlatform == "x86_64-pc-windows-gnullvm");
           }
           {
             name = "windows_x86_64_msvc";
-            packageId = "windows_x86_64_msvc 0.52.0";
-            target = { target, features }: (("x86_64" == target."arch" or null) && ("msvc" == target."env" or null) && (!(target."windows_raw_dylib" or false)));
+            packageId = "windows_x86_64_msvc 0.52.5";
+            target = { target, features }: ((("x86_64" == target."arch" or null) || ("arm64ec" == target."arch" or null)) && ("msvc" == target."env" or null) && (!(target."windows_raw_dylib" or false)));
           }
         ];
 
@@ -16682,11 +17401,11 @@ rec {
         ];
 
       };
-      "windows_aarch64_gnullvm 0.52.0" = rec {
+      "windows_aarch64_gnullvm 0.52.5" = rec {
         crateName = "windows_aarch64_gnullvm";
-        version = "0.52.0";
+        version = "0.52.5";
         edition = "2021";
-        sha256 = "1shmn1kbdc0bpphcxz0vlph96bxz0h1jlmh93s9agf2dbpin8xyb";
+        sha256 = "0qrjimbj67nnyn7zqy15mzzmqg0mn5gsr2yciqjxm3cb3vbyx23h";
         authors = [
           "Microsoft"
         ];
@@ -16702,11 +17421,11 @@ rec {
         ];
 
       };
-      "windows_aarch64_msvc 0.52.0" = rec {
+      "windows_aarch64_msvc 0.52.5" = rec {
         crateName = "windows_aarch64_msvc";
-        version = "0.52.0";
+        version = "0.52.5";
         edition = "2021";
-        sha256 = "1vvmy1ypvzdvxn9yf0b8ygfl85gl2gpcyvsvqppsmlpisil07amv";
+        sha256 = "1dmga8kqlmln2ibckk6mxc9n59vdg8ziqa2zr8awcl720hazv1cr";
         authors = [
           "Microsoft"
         ];
@@ -16722,11 +17441,21 @@ rec {
         ];
 
       };
-      "windows_i686_gnu 0.52.0" = rec {
+      "windows_i686_gnu 0.52.5" = rec {
         crateName = "windows_i686_gnu";
-        version = "0.52.0";
+        version = "0.52.5";
+        edition = "2021";
+        sha256 = "0w4np3l6qwlra9s2xpflqrs60qk1pz6ahhn91rr74lvdy4y0gfl8";
+        authors = [
+          "Microsoft"
+        ];
+
+      };
+      "windows_i686_gnullvm" = rec {
+        crateName = "windows_i686_gnullvm";
+        version = "0.52.5";
         edition = "2021";
-        sha256 = "04zkglz4p3pjsns5gbz85v4s5aw102raz4spj4b0lmm33z5kg1m2";
+        sha256 = "1s9f4gff0cixd86mw3n63rpmsm4pmr4ffndl6s7qa2h35492dx47";
         authors = [
           "Microsoft"
         ];
@@ -16742,11 +17471,11 @@ rec {
         ];
 
       };
-      "windows_i686_msvc 0.52.0" = rec {
+      "windows_i686_msvc 0.52.5" = rec {
         crateName = "windows_i686_msvc";
-        version = "0.52.0";
+        version = "0.52.5";
         edition = "2021";
-        sha256 = "16kvmbvx0vr0zbgnaz6nsks9ycvfh5xp05bjrhq65kj623iyirgz";
+        sha256 = "1gw7fklxywgpnwbwg43alb4hm0qjmx72hqrlwy5nanrxs7rjng6v";
         authors = [
           "Microsoft"
         ];
@@ -16762,11 +17491,11 @@ rec {
         ];
 
       };
-      "windows_x86_64_gnu 0.52.0" = rec {
+      "windows_x86_64_gnu 0.52.5" = rec {
         crateName = "windows_x86_64_gnu";
-        version = "0.52.0";
+        version = "0.52.5";
         edition = "2021";
-        sha256 = "1zdy4qn178sil5sdm63lm7f0kkcjg6gvdwmcprd2yjmwn8ns6vrx";
+        sha256 = "1n8p2mcf3lw6300k77a0knksssmgwb9hynl793mhkzyydgvlchjf";
         authors = [
           "Microsoft"
         ];
@@ -16782,11 +17511,11 @@ rec {
         ];
 
       };
-      "windows_x86_64_gnullvm 0.52.0" = rec {
+      "windows_x86_64_gnullvm 0.52.5" = rec {
         crateName = "windows_x86_64_gnullvm";
-        version = "0.52.0";
+        version = "0.52.5";
         edition = "2021";
-        sha256 = "17lllq4l2k1lqgcnw1cccphxp9vs7inq99kjlm2lfl9zklg7wr8s";
+        sha256 = "15n56jrh4s5bz66zimavr1rmcaw6wa306myrvmbc6rydhbj9h8l5";
         authors = [
           "Microsoft"
         ];
@@ -16802,11 +17531,11 @@ rec {
         ];
 
       };
-      "windows_x86_64_msvc 0.52.0" = rec {
+      "windows_x86_64_msvc 0.52.5" = rec {
         crateName = "windows_x86_64_msvc";
-        version = "0.52.0";
+        version = "0.52.5";
         edition = "2021";
-        sha256 = "012wfq37f18c09ij5m6rniw7xxn5fcvrxbqd0wd8vgnl3hfn9yfz";
+        sha256 = "1w1bn24ap8dp9i85s8mlg8cim2bl2368bd6qyvm0xzqvzmdpxi5y";
         authors = [
           "Microsoft"
         ];
@@ -16893,18 +17622,6 @@ rec {
         };
         resolvedDefaultFeatures = [ "default" "unsupported" ];
       };
-      "xml-rs" = rec {
-        crateName = "xml-rs";
-        version = "0.8.19";
-        edition = "2021";
-        crateBin = [ ];
-        sha256 = "0nnpvk3fv32hgh7vs9gbg2swmzxx5yz73f4b7rak7q39q2x9rjqg";
-        libName = "xml";
-        authors = [
-          "Vladimir Matveev <vmatveev@citrine.cc>"
-        ];
-
-      };
       "xz2" = rec {
         crateName = "xz2";
         version = "0.1.7";
@@ -17446,8 +18163,6 @@ rec {
             buildRustCrateForPkgsFunc pkgs
               (
                 crateConfig // {
-                  # https://github.com/NixOS/nixpkgs/issues/218712
-                  dontStrip = stdenv.hostPlatform.isDarwin;
                   src = crateConfig.src or (
                     pkgs.fetchurl rec {
                       name = "${crateConfig.crateName}-${crateConfig.version}.tar.gz";
diff --git a/tvix/Cargo.toml b/tvix/Cargo.toml
index 6cd19831dc..ed5c6d0d8b 100644
--- a/tvix/Cargo.toml
+++ b/tvix/Cargo.toml
@@ -28,8 +28,14 @@ members = [
   "nix-compat",
   "serde",
   "store",
+  "tracing",
 ]
 
+[workspace.lints.clippy]
+# Allow blocks_in_conditions due to false positives with #[tracing::instrument(…)]:
+# https://github.com/rust-lang/rust-clippy/issues/12281
+blocks_in_conditions = "allow"
+
 # Add a profile to all targets that enables release optimisations, but
 # retains debug symbols. This is great for use with
 # benchmarking/profiling tools.
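For the workspace-level allow to actually apply, each member crate opts into the
shared lint table; this is the stanza this commit adds to `tvix/build` and
`tvix/castore` further down:

```
[lints]
workspace = true
```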
diff --git a/tvix/README.md b/tvix/README.md
index bf96afa4ba..7f3fcc0615 100644
--- a/tvix/README.md
+++ b/tvix/README.md
@@ -61,8 +61,7 @@ This folder contains the following components:
 * `//tvix/castore` - subtree storage/transfer in a content-addressed fashion
 * `//tvix/cli` - preliminary REPL & CLI implementation for Tvix
 * `//tvix/eval` - an implementation of the Nix programming language
-* `//tvix/nar-bridge`
-  * `nar-bridge-http`: A HTTP webserver providing a Nix HTTP Binary Cache interface in front of a tvix-store
+* `//tvix/nar-bridge-go` - an HTTP webserver providing a Nix HTTP Binary Cache interface in front of a tvix-store
 * `//tvix/nix-compat` - a Rust library for compatibility with C++ Nix, features like encodings and hashing schemes and formats
 * `//tvix/serde` - a Rust library for using the Nix language for app configuration
 * `//tvix/store` - a "filesystem" linking Nix store paths and metadata with the content-addressed layer
@@ -105,6 +104,11 @@ Rust projects under `//tvix`, be sure to run
 `mg run //tools:crate2nix-generate` in `//tvix` itself and commit the changes
 to the generated `Cargo.nix` file. This only applies to the full TVL checkout.
 
+When adding/removing a Cargo feature for a crate, you will want to add it to the
+feature power set that gets tested in CI. Each crate has a `default.nix` with a
+`mkFeaturePowerset` invocation; modify its feature list to include/remove the
+feature (see the sketch below). Note that you don't want to add "collection"
+features, such as `fs` for tvix-[ca]store, or `default`.
+
 ## License structure
 
 All code implemented for Tvix is licensed under the GPL-3.0, with the
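A concrete sketch of the `mkFeaturePowerset` pattern described in the README
hunk above, mirroring the `tvix/build/default.nix` change later in this commit
(only the `features` list differs per crate):

```
(depot.tvix.crates.workspaceMembers.tvix-build.build.override {
  runTests = true;
}).overrideAttrs (old: rec {
  # one CI target per feature subset, plus a no-features build
  meta.ci.targets = lib.filter
    (x: lib.hasPrefix "with-features" x || x == "no-features")
    (lib.attrNames passthru);
  passthru = depot.tvix.utils.mkFeaturePowerset {
    inherit (old) crateName;
    # extend or shrink this list when adding/removing a feature
    features = [ "tonic-reflection" ];
  };
})
```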
diff --git a/tvix/boot/README.md b/tvix/boot/README.md
index 13a4855060..9c7b722a7a 100644
--- a/tvix/boot/README.md
+++ b/tvix/boot/README.md
@@ -43,7 +43,7 @@ Potentially copy some data into tvix-store (via nar-bridge):
 
 ```
 mg run //tvix:store -- daemon &
-$(mg build //tvix:nar-bridge)/bin/nar-bridge-http &
+$(mg build //tvix:nar-bridge-go)/bin/nar-bridge-http &
 rm -Rf ~/.cache/nix; nix copy --to http://localhost:9000\?compression\=none $(mg build //third_party/nixpkgs:hello)
 pkill nar-bridge-http; pkill tvix-store
 ```
diff --git a/tvix/build-go/build.pb.go b/tvix/build-go/build.pb.go
index 9c6bd5f248..d7f7fd82ab 100644
--- a/tvix/build-go/build.pb.go
+++ b/tvix/build-go/build.pb.go
@@ -3,7 +3,7 @@
 
 // Code generated by protoc-gen-go. DO NOT EDIT.
 // versions:
-// 	protoc-gen-go v1.33.0
+// 	protoc-gen-go v1.34.1
 // 	protoc        (unknown)
 // source: tvix/build/protos/build.proto
 
diff --git a/tvix/build-go/rpc_build.pb.go b/tvix/build-go/rpc_build.pb.go
index aae0cd9d47..361cd179b2 100644
--- a/tvix/build-go/rpc_build.pb.go
+++ b/tvix/build-go/rpc_build.pb.go
@@ -3,7 +3,7 @@
 
 // Code generated by protoc-gen-go. DO NOT EDIT.
 // versions:
-// 	protoc-gen-go v1.33.0
+// 	protoc-gen-go v1.34.1
 // 	protoc        (unknown)
 // source: tvix/build/protos/rpc_build.proto
 
diff --git a/tvix/build/Cargo.toml b/tvix/build/Cargo.toml
index 626fd35d77..6faa3ad7ab 100644
--- a/tvix/build/Cargo.toml
+++ b/tvix/build/Cargo.toml
@@ -10,11 +10,11 @@ itertools = "0.12.0"
 prost = "0.12.1"
 thiserror = "1.0.56"
 tokio = { version = "1.32.0" }
-tokio-listener = { version = "0.3.2", features = [ "tonic011" ] }
+tokio-listener = { version = "0.4.1", features = [ "tonic011" ] }
 tonic = { version = "0.11.0", features = ["tls", "tls-roots"] }
 tvix-castore = { path = "../castore" }
-tracing = "0.1.37"
-tracing-subscriber = { version = "0.3.16", features = ["json"] }
+tvix-tracing = { path = "../tracing" }
+tracing = "0.1.40"
 url = "2.4.0"
 
 [dependencies.tonic-reflection]
@@ -27,7 +27,10 @@ tonic-build = "0.11.0"
 
 [features]
 default = []
-tonic-reflection = ["dep:tonic-reflection"]
+tonic-reflection = ["dep:tonic-reflection", "tvix-castore/tonic-reflection"]
 
 [dev-dependencies]
 rstest = "0.19.0"
+
+[lints]
+workspace = true
diff --git a/tvix/build/default.nix b/tvix/build/default.nix
index a2a3bea0c5..aafab92fd5 100644
--- a/tvix/build/default.nix
+++ b/tvix/build/default.nix
@@ -1,5 +1,11 @@
-{ depot, pkgs, ... }:
+{ depot, lib, ... }:
 
-depot.tvix.crates.workspaceMembers.tvix-build.build.override {
+(depot.tvix.crates.workspaceMembers.tvix-build.build.override {
   runTests = true;
-}
+}).overrideAttrs (old: rec {
+  meta.ci.targets = lib.filter (x: lib.hasPrefix "with-features" x || x == "no-features") (lib.attrNames passthru);
+  passthru = depot.tvix.utils.mkFeaturePowerset {
+    inherit (old) crateName;
+    features = [ "tonic-reflection" ];
+  };
+})
diff --git a/tvix/build/src/bin/tvix-build.rs b/tvix/build/src/bin/tvix-build.rs
index ed36c8933c..26f2044af3 100644
--- a/tvix/build/src/bin/tvix-build.rs
+++ b/tvix/build/src/bin/tvix-build.rs
@@ -7,7 +7,6 @@ use tokio_listener::SystemOptions;
 use tokio_listener::UserOptions;
 use tonic::{self, transport::Server};
 use tracing::{info, Level};
-use tracing_subscriber::prelude::*;
 use tvix_build::{
     buildservice,
     proto::{build_service_server::BuildServiceServer, GRPCBuildServiceWrapper},
@@ -23,12 +22,12 @@ use tvix_castore::proto::FILE_DESCRIPTOR_SET as CASTORE_FILE_DESCRIPTOR_SET;
 #[derive(Parser)]
 #[command(author, version, about, long_about = None)]
 struct Cli {
-    /// Whether to log in JSON
-    #[arg(long)]
-    json: bool,
-
-    #[arg(long)]
-    log_level: Option<Level>,
+    /// A global log level to use when printing logs.
+    /// It's also possible to set `RUST_LOG`, interpreted as a
+    /// `tracing_subscriber::filter::EnvFilter`, which always takes
+    /// priority.
+    #[arg(long, default_value_t=Level::INFO)]
+    log_level: Level,
 
     #[command(subcommand)]
     command: Commands,
@@ -55,26 +54,9 @@ enum Commands {
 async fn main() -> Result<(), Box<dyn std::error::Error>> {
     let cli = Cli::parse();
 
-    // configure log settings
-    let level = cli.log_level.unwrap_or(Level::INFO);
-
-    let subscriber = tracing_subscriber::registry()
-        .with(
-            cli.json.then_some(
-                tracing_subscriber::fmt::Layer::new()
-                    .with_writer(std::io::stderr.with_max_level(level))
-                    .json(),
-            ),
-        )
-        .with(
-            (!cli.json).then_some(
-                tracing_subscriber::fmt::Layer::new()
-                    .with_writer(std::io::stderr.with_max_level(level))
-                    .pretty(),
-            ),
-        );
-
-    tracing::subscriber::set_global_default(subscriber).expect("Unable to set global subscriber");
+    let _ = tvix_tracing::TracingBuilder::default()
+        .level(cli.log_level)
+        .enable_progressbar();
 
     match cli.command {
         Commands::Daemon {
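Illustrative invocations of the reworked flag (binary and subcommand names as
in this diff; the `RUST_LOG` value is standard `tracing_subscriber` EnvFilter
syntax):

```
# defaults to INFO
tvix-build daemon

# raise the global level via the flag
tvix-build --log-level debug daemon

# RUST_LOG always takes priority over --log-level
RUST_LOG=tvix_build=trace tvix-build --log-level info daemon
```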
diff --git a/tvix/castore-go/castore.pb.go b/tvix/castore-go/castore.pb.go
index 464f1d4a41..555336418f 100644
--- a/tvix/castore-go/castore.pb.go
+++ b/tvix/castore-go/castore.pb.go
@@ -3,7 +3,7 @@
 
 // Code generated by protoc-gen-go. DO NOT EDIT.
 // versions:
-// 	protoc-gen-go v1.33.0
+// 	protoc-gen-go v1.34.1
 // 	protoc        (unknown)
 // source: tvix/castore/protos/castore.proto
 
diff --git a/tvix/castore-go/rpc_blobstore.pb.go b/tvix/castore-go/rpc_blobstore.pb.go
index 3607a65bbe..0a2adba4bb 100644
--- a/tvix/castore-go/rpc_blobstore.pb.go
+++ b/tvix/castore-go/rpc_blobstore.pb.go
@@ -3,7 +3,7 @@
 
 // Code generated by protoc-gen-go. DO NOT EDIT.
 // versions:
-// 	protoc-gen-go v1.33.0
+// 	protoc-gen-go v1.34.1
 // 	protoc        (unknown)
 // source: tvix/castore/protos/rpc_blobstore.proto
 
diff --git a/tvix/castore-go/rpc_directory.pb.go b/tvix/castore-go/rpc_directory.pb.go
index 78c4a243e3..0277425c52 100644
--- a/tvix/castore-go/rpc_directory.pb.go
+++ b/tvix/castore-go/rpc_directory.pb.go
@@ -3,7 +3,7 @@
 
 // Code generated by protoc-gen-go. DO NOT EDIT.
 // versions:
-// 	protoc-gen-go v1.33.0
+// 	protoc-gen-go v1.34.1
 // 	protoc        (unknown)
 // source: tvix/castore/protos/rpc_directory.proto
 
diff --git a/tvix/castore/Cargo.toml b/tvix/castore/Cargo.toml
index 2797ef08f2..9516130570 100644
--- a/tvix/castore/Cargo.toml
+++ b/tvix/castore/Cargo.toml
@@ -4,6 +4,7 @@ version = "0.1.0"
 edition = "2021"
 
 [dependencies]
+async-compression = { version = "0.4.9", features = ["tokio", "zstd"]}
 async-stream = "0.3.5"
 async-tempfile = "0.4.0"
 blake3 = { version = "1.3.1", features = ["rayon", "std", "traits-preview"] }
@@ -21,12 +22,14 @@ prost = "0.12.1"
 sled = { version = "0.34.7" }
 thiserror = "1.0.38"
 tokio-stream = { version = "0.1.14", features = ["fs", "net"] }
-tokio-util = { version = "0.7.9", features = ["io", "io-util"] }
+tokio-util = { version = "0.7.9", features = ["io", "io-util", "codec"] }
 tokio-tar = "0.3.1"
 tokio = { version = "1.32.0", features = ["fs", "macros", "net", "rt", "rt-multi-thread", "signal"] }
 tonic = "0.11.0"
 tower = "0.4.13"
 tracing = "0.1.37"
+tracing-indicatif = "0.3.6"
+tvix-tracing = { path = "../tracing", features = ["tonic"] }
 url = "2.4.0"
 walkdir = "2.4.0"
 zstd = "0.13.0"
@@ -49,6 +52,10 @@ version = "0.11.0"
 optional = true
 version = "0.2.144"
 
+[dependencies.threadpool]
+version = "1.8.1"
+optional = true
+
 [dependencies.tonic-reflection]
 optional = true
 version = "0.11.0"
@@ -91,14 +98,14 @@ rstest_reuse = "0.6.0"
 xattr = "1.3.1"
 
 [features]
-default = []
+default = ["cloud"]
 cloud = [
   "dep:bigtable_rs",
   "object_store/aws",
   "object_store/azure",
   "object_store/gcp",
 ]
-fs = ["dep:libc", "dep:fuse-backend-rs"]
+fs = ["dep:fuse-backend-rs", "dep:threadpool", "dep:libc"]
 virtiofs = [
   "fs",
   "dep:vhost",
@@ -116,3 +123,6 @@ tonic-reflection = ["dep:tonic-reflection"]
 # Requires the following packages in $PATH:
 # cbtemulator, google-cloud-bigtable-tool
 integration = []
+
+[lints]
+workspace = true
diff --git a/tvix/castore/default.nix b/tvix/castore/default.nix
index 641d883760..03a12b6c20 100644
--- a/tvix/castore/default.nix
+++ b/tvix/castore/default.nix
@@ -1,23 +1,28 @@
-{ depot, pkgs, ... }:
+{ depot, pkgs, lib, ... }:
 
 (depot.tvix.crates.workspaceMembers.tvix-castore.build.override {
   runTests = true;
   testPreRun = ''
     export SSL_CERT_FILE=${pkgs.cacert}/etc/ssl/certs/ca-bundle.crt;
   '';
-
-  # enable some optional features.
-  features = [ "default" "cloud" ];
-}).overrideAttrs (_: {
-  meta.ci.targets = [ "integration-tests" ];
-  passthru.integration-tests = depot.tvix.crates.workspaceMembers.tvix-castore.build.override {
-    runTests = true;
-    testPreRun = ''
-      export SSL_CERT_FILE=${pkgs.cacert}/etc/ssl/certs/ca-bundle.crt;
-      export PATH="$PATH:${pkgs.lib.makeBinPath [pkgs.cbtemulator pkgs.google-cloud-bigtable-tool]}"
+}).overrideAttrs (old: rec {
+  meta.ci.targets = [ "integration-tests" ] ++ lib.filter (x: lib.hasPrefix "with-features" x || x == "no-features") (lib.attrNames passthru);
+  passthru = (depot.tvix.utils.mkFeaturePowerset {
+    inherit (old) crateName;
+    features = ([ "cloud" "fuse" "tonic-reflection" ]
+      # virtiofs feature currently fails to build on Darwin
+      ++ lib.optional pkgs.stdenv.isLinux "virtiofs");
+    override.testPreRun = ''
+      export SSL_CERT_FILE=${pkgs.cacert}/etc/ssl/certs/ca-bundle.crt
     '';
-
-    # enable some optional features.
-    features = [ "default" "cloud" "integration" ];
+  }) // {
+    integration-tests = depot.tvix.crates.workspaceMembers.${old.crateName}.build.override (old: {
+      runTests = true;
+      testPreRun = ''
+        export SSL_CERT_FILE=${pkgs.cacert}/etc/ssl/certs/ca-bundle.crt;
+        export PATH="$PATH:${pkgs.lib.makeBinPath [ pkgs.cbtemulator pkgs.google-cloud-bigtable-tool ]}"
+      '';
+      features = old.features ++ [ "integration" ];
+    });
   };
 })
diff --git a/tvix/castore/src/blobservice/from_addr.rs b/tvix/castore/src/blobservice/from_addr.rs
index 8898bbfb95..f76592e509 100644
--- a/tvix/castore/src/blobservice/from_addr.rs
+++ b/tvix/castore/src/blobservice/from_addr.rs
@@ -30,8 +30,12 @@ pub async fn from_addr(uri: &str) -> Result<Box<dyn BlobService>, crate::Error>
             // - In the case of unix sockets, there must be a path, but may not be a host.
             // - In the case of non-unix sockets, there must be a host, but no path.
             // Constructing the channel is handled by tvix_castore::channel::from_url.
-            let client = BlobServiceClient::new(crate::tonic::channel_from_url(&url).await?);
-            Box::new(GRPCBlobService::from_client(client))
+            Box::new(GRPCBlobService::from_client(
+                BlobServiceClient::with_interceptor(
+                    crate::tonic::channel_from_url(&url).await?,
+                    tvix_tracing::propagate::tonic::send_trace,
+                ),
+            ))
         }
         scheme if scheme.starts_with("objectstore+") => {
             // We need to convert the URL to string, strip the prefix there, and then
diff --git a/tvix/castore/src/blobservice/grpc.rs b/tvix/castore/src/blobservice/grpc.rs
index 5663cd3838..834c0c7bf5 100644
--- a/tvix/castore/src/blobservice/grpc.rs
+++ b/tvix/castore/src/blobservice/grpc.rs
@@ -17,29 +17,33 @@ use tokio_util::{
     io::{CopyToBytes, SinkWriter},
     sync::PollSender,
 };
-use tonic::{async_trait, transport::Channel, Code, Status};
-use tracing::instrument;
+use tonic::{async_trait, Code, Status};
+use tracing::{instrument, Instrument as _};
 
 /// Connects to a (remote) tvix-store BlobService over gRPC.
 #[derive(Clone)]
-pub struct GRPCBlobService {
+pub struct GRPCBlobService<T> {
     /// The internal reference to a gRPC client.
     /// Cloning it is cheap, and it internally handles concurrent requests.
-    grpc_client: proto::blob_service_client::BlobServiceClient<Channel>,
+    grpc_client: proto::blob_service_client::BlobServiceClient<T>,
 }
 
-impl GRPCBlobService {
+impl<T> GRPCBlobService<T> {
     /// construct a [GRPCBlobService] from a [proto::blob_service_client::BlobServiceClient].
     /// panics if called outside the context of a tokio runtime.
-    pub fn from_client(
-        grpc_client: proto::blob_service_client::BlobServiceClient<Channel>,
-    ) -> Self {
+    pub fn from_client(grpc_client: proto::blob_service_client::BlobServiceClient<T>) -> Self {
         Self { grpc_client }
     }
 }
 
 #[async_trait]
-impl BlobService for GRPCBlobService {
+impl<T> BlobService for GRPCBlobService<T>
+where
+    T: tonic::client::GrpcService<tonic::body::BoxBody> + Send + Sync + Clone + 'static,
+    T::ResponseBody: tonic::codegen::Body<Data = tonic::codegen::Bytes> + Send + 'static,
+    <T::ResponseBody as tonic::codegen::Body>::Error: Into<tonic::codegen::StdError> + Send,
+    T::Future: Send,
+{
     #[instrument(skip(self, digest), fields(blob.digest=%digest))]
     async fn has(&self, digest: &B3Digest) -> io::Result<bool> {
         let mut grpc_client = self.grpc_client.clone();
@@ -133,6 +137,8 @@ impl BlobService for GRPCBlobService {
         let task = tokio::spawn({
             let mut grpc_client = self.grpc_client.clone();
             async move { Ok::<_, Status>(grpc_client.put(blobchunk_stream).await?.into_inner()) }
+                // instrument the task with the current span; this is not done by default
+                .in_current_span()
         });
 
         // The tx part of the channel is converted to a sink of byte chunks.
@@ -335,7 +341,6 @@ mod tests {
                     .await
                     .expect("must succeed"),
             );
-
             GRPCBlobService::from_client(client)
         };
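The point of loosening `GRPCBlobService` from a concrete `Channel` to any
`GrpcService` is visible in the `from_addr.rs` hunk above: the client may now
be wrapped in an interceptor. A minimal sketch (assuming `channel` was built
via `crate::tonic::channel_from_url`):

```
// plain channel, as before
let svc = GRPCBlobService::from_client(BlobServiceClient::new(channel.clone()));

// channel wrapped with the trace-propagation interceptor; the client is now
// an InterceptedService<Channel, _>, which the generic parameter also accepts
let svc = GRPCBlobService::from_client(BlobServiceClient::with_interceptor(
    channel,
    tvix_tracing::propagate::tonic::send_trace,
));
```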
 
diff --git a/tvix/castore/src/directoryservice/bigtable.rs b/tvix/castore/src/directoryservice/bigtable.rs
index 0fdb24628f..1194c6ddc9 100644
--- a/tvix/castore/src/directoryservice/bigtable.rs
+++ b/tvix/castore/src/directoryservice/bigtable.rs
@@ -343,7 +343,7 @@ impl DirectoryService for BigtableDirectoryService {
     fn get_recursive(
         &self,
         root_directory_digest: &B3Digest,
-    ) -> BoxStream<Result<proto::Directory, Error>> {
+    ) -> BoxStream<'static, Result<proto::Directory, Error>> {
         traverse_directory(self.clone(), root_directory_digest)
     }
 
diff --git a/tvix/castore/src/directoryservice/closure_validator.rs b/tvix/castore/src/directoryservice/closure_validator.rs
deleted file mode 100644
index 183928a86f..0000000000
--- a/tvix/castore/src/directoryservice/closure_validator.rs
+++ /dev/null
@@ -1,268 +0,0 @@
-use std::collections::{HashMap, HashSet};
-
-use bstr::ByteSlice;
-
-use petgraph::{
-    graph::{DiGraph, NodeIndex},
-    visit::Bfs,
-};
-use tracing::instrument;
-
-use crate::{
-    proto::{self, Directory},
-    B3Digest, Error,
-};
-
-/// This can be used to validate a Directory closure (DAG of connected
-/// Directories), and their insertion order.
-///
-/// Directories need to be inserted (via `add`), in an order from the leaves to
-/// the root (DFS Post-Order).
-/// During insertion, We validate as much as we can at that time:
-///
-///  - individual validation of Directory messages
-///  - validation of insertion order (no upload of not-yet-known Directories)
-///  - validation of size fields of referred Directories
-///
-/// Internally it keeps all received Directories in a directed graph,
-/// with node weights being the Directories and edges pointing to child
-/// directories.
-///
-/// Once all Directories have been inserted, a finalize function can be
-/// called to get a (deduplicated and) validated list of directories, in
-/// insertion order.
-/// During finalize, a check for graph connectivity is performed too, to ensure
-/// there's no disconnected components, and only one root.
-#[derive(Default)]
-pub struct ClosureValidator {
-    // A directed graph, using Directory as node weight, without edge weights.
-    // Edges point from parents to children.
-    graph: DiGraph<Directory, ()>,
-
-    // A lookup table from directory digest to node index.
-    digest_to_node_ix: HashMap<B3Digest, NodeIndex>,
-
-    /// Keeps track of the last-inserted directory graph node index.
-    /// On a correct insert, this will be the root node, from which the DFS post
-    /// order traversal will start from.
-    last_directory_ix: Option<NodeIndex>,
-}
-
-impl ClosureValidator {
-    /// Insert a new Directory into the closure.
-    /// Perform individual Directory validation, validation of insertion order
-    /// and size fields.
-    #[instrument(level = "trace", skip_all, fields(directory.digest=%directory.digest(), directory.size=%directory.size()), err)]
-    pub fn add(&mut self, directory: proto::Directory) -> Result<(), Error> {
-        let digest = directory.digest();
-
-        // If we already saw this node previously, it's already validated and in the graph.
-        if self.digest_to_node_ix.contains_key(&digest) {
-            return Ok(());
-        }
-
-        // Do some general validation
-        directory
-            .validate()
-            .map_err(|e| Error::InvalidRequest(e.to_string()))?;
-
-        // Ensure the directory only refers to directories which we already accepted.
-        // We lookup their node indices and add them to a HashSet.
-        let mut child_ixs = HashSet::new();
-        for dir in &directory.directories {
-            let child_digest = B3Digest::try_from(dir.digest.to_owned()).unwrap(); // validated
-
-            // Ensure the digest has already been seen
-            let child_ix = *self.digest_to_node_ix.get(&child_digest).ok_or_else(|| {
-                Error::InvalidRequest(format!(
-                    "'{}' refers to unseen child dir: {}",
-                    dir.name.as_bstr(),
-                    &child_digest
-                ))
-            })?;
-
-            // Ensure the size specified in the child node matches the directory size itself.
-            let recorded_child_size = self
-                .graph
-                .node_weight(child_ix)
-                .expect("node not found")
-                .size();
-
-            // Ensure the size specified in the child node matches our records.
-            if dir.size != recorded_child_size {
-                return Err(Error::InvalidRequest(format!(
-                    "'{}' has wrong size, specified {}, recorded {}",
-                    dir.name.as_bstr(),
-                    dir.size,
-                    recorded_child_size
-                )));
-            }
-
-            child_ixs.insert(child_ix);
-        }
-
-        // Insert node into the graph, and add edges to all children.
-        let node_ix = self.graph.add_node(directory);
-        for child_ix in child_ixs {
-            self.graph.add_edge(node_ix, child_ix, ());
-        }
-
-        // Record the mapping from digest to node_ix in our lookup table.
-        self.digest_to_node_ix.insert(digest, node_ix);
-
-        // Update last_directory_ix.
-        self.last_directory_ix = Some(node_ix);
-
-        Ok(())
-    }
-
-    /// Ensure that all inserted Directories are connected, then return a
-    /// (deduplicated) and validated list of directories, in from-leaves-to-root
-    /// order.
-    /// In case no elements have been inserted, returns an empty list.
-    #[instrument(level = "trace", skip_all, err)]
-    pub(crate) fn finalize(self) -> Result<Vec<Directory>, Error> {
-        // If no nodes were inserted, an empty list is returned.
-        let last_directory_ix = if let Some(x) = self.last_directory_ix {
-            x
-        } else {
-            return Ok(vec![]);
-        };
-
-        // do a BFS traversal of the graph, starting with the root node to get
-        // (the count of) all nodes reachable from there.
-        let mut traversal = Bfs::new(&self.graph, last_directory_ix);
-
-        let mut visited_directory_count = 0;
-        #[cfg(debug_assertions)]
-        let mut visited_directory_ixs = HashSet::new();
-        #[cfg_attr(not(debug_assertions), allow(unused))]
-        while let Some(directory_ix) = traversal.next(&self.graph) {
-            #[cfg(debug_assertions)]
-            visited_directory_ixs.insert(directory_ix);
-
-            visited_directory_count += 1;
-        }
-
-        // If the number of nodes collected equals the total number of nodes in
-        // the graph, we know all nodes are connected.
-        if visited_directory_count != self.graph.node_count() {
-            // more or less exhaustive error reporting.
-            #[cfg(debug_assertions)]
-            {
-                let all_directory_ixs: HashSet<_> = self.graph.node_indices().collect();
-
-                let unvisited_directories: HashSet<_> = all_directory_ixs
-                    .difference(&visited_directory_ixs)
-                    .map(|ix| self.graph.node_weight(*ix).expect("node not found"))
-                    .collect();
-
-                return Err(Error::InvalidRequest(format!(
-                    "found {} disconnected directories: {:?}",
-                    self.graph.node_count() - visited_directory_ixs.len(),
-                    unvisited_directories
-                )));
-            }
-            #[cfg(not(debug_assertions))]
-            {
-                return Err(Error::InvalidRequest(format!(
-                    "found {} disconnected directories",
-                    self.graph.node_count() - visited_directory_count
-                )));
-            }
-        }
-
-        // Dissolve the graph, returning the nodes as a Vec.
-        // As the graph was populated in a valid DFS PostOrder, we can return
-        // nodes in that same order.
-        let (nodes, _edges) = self.graph.into_nodes_edges();
-        Ok(nodes.into_iter().map(|x| x.weight).collect())
-    }
-}
-
-#[cfg(test)]
-mod tests {
-    use crate::{
-        fixtures::{DIRECTORY_A, DIRECTORY_B, DIRECTORY_C},
-        proto::{self, Directory},
-    };
-    use lazy_static::lazy_static;
-    use rstest::rstest;
-
-    lazy_static! {
-        pub static ref BROKEN_DIRECTORY : Directory = Directory {
-            symlinks: vec![proto::SymlinkNode {
-                name: "".into(), // invalid name!
-                target: "doesntmatter".into(),
-            }],
-            ..Default::default()
-        };
-
-        pub static ref BROKEN_PARENT_DIRECTORY: Directory = Directory {
-            directories: vec![proto::DirectoryNode {
-                name: "foo".into(),
-                digest: DIRECTORY_A.digest().into(),
-                size: DIRECTORY_A.size() + 42, // wrong!
-            }],
-            ..Default::default()
-        };
-    }
-
-    use super::ClosureValidator;
-
-    #[rstest]
-    /// Uploading an empty directory should succeed.
-    #[case::empty_directory(&[&*DIRECTORY_A], false, Some(vec![&*DIRECTORY_A]))]
-    /// Uploading A, then B (referring to A) should succeed.
-    #[case::simple_closure(&[&*DIRECTORY_A, &*DIRECTORY_B], false, Some(vec![&*DIRECTORY_A, &*DIRECTORY_B]))]
-    /// Uploading A, then A, then C (referring to A twice) should succeed.
-    /// We pretend to be a dumb client not deduping directories.
-    #[case::same_child(&[&*DIRECTORY_A, &*DIRECTORY_A, &*DIRECTORY_C], false, Some(vec![&*DIRECTORY_A, &*DIRECTORY_C]))]
-    /// Uploading A, then C (referring to A twice) should succeed.
-    #[case::same_child_dedup(&[&*DIRECTORY_A, &*DIRECTORY_C], false, Some(vec![&*DIRECTORY_A, &*DIRECTORY_C]))]
-    /// Uploading A, then C (referring to A twice), then B (itself referring to A) should fail during close,
-    /// as B itself would be left unconnected.
-    #[case::unconnected_node(&[&*DIRECTORY_A, &*DIRECTORY_C, &*DIRECTORY_B], false, None)]
-    /// Uploading B (referring to A) should fail immediately, because A was never uploaded.
-    #[case::dangling_pointer(&[&*DIRECTORY_B], true, None)]
-    /// Uploading a directory failing validation should fail immediately.
-    #[case::failing_validation(&[&*BROKEN_DIRECTORY], true, None)]
-    /// Uploading a directory which refers to another Directory with a wrong size should fail.
-    #[case::wrong_size_in_parent(&[&*DIRECTORY_A, &*BROKEN_PARENT_DIRECTORY], true, None)]
-    fn test_uploads(
-        #[case] directories_to_upload: &[&Directory],
-        #[case] exp_fail_upload_last: bool,
-        #[case] exp_finalize: Option<Vec<&Directory>>, // Some(_) if finalize successful, None if not.
-    ) {
-        let mut dcv = ClosureValidator::default();
-        let len_directories_to_upload = directories_to_upload.len();
-
-        for (i, d) in directories_to_upload.iter().enumerate() {
-            let resp = dcv.add((*d).clone());
-            if i == len_directories_to_upload - 1 && exp_fail_upload_last {
-                assert!(resp.is_err(), "expect last put to fail");
-
-                // We don't really care anymore what finalize() would return, as
-                // the add() failed.
-                return;
-            } else {
-                assert!(resp.is_ok(), "expect put to succeed");
-            }
-        }
-
-        // everything was uploaded successfully. Test finalize().
-        let resp = dcv.finalize();
-
-        match exp_finalize {
-            Some(directories) => {
-                assert_eq!(
-                    Vec::from_iter(directories.iter().map(|e| (*e).to_owned())),
-                    resp.expect("drain should succeed")
-                );
-            }
-            None => {
-                resp.expect_err("drain should fail");
-            }
-        }
-    }
-}
diff --git a/tvix/castore/src/directoryservice/combinators.rs b/tvix/castore/src/directoryservice/combinators.rs
new file mode 100644
index 0000000000..d3f351d6b6
--- /dev/null
+++ b/tvix/castore/src/directoryservice/combinators.rs
@@ -0,0 +1,142 @@
+use futures::stream::BoxStream;
+use futures::StreamExt;
+use futures::TryFutureExt;
+use futures::TryStreamExt;
+use tonic::async_trait;
+use tracing::{instrument, trace};
+
+use super::{DirectoryGraph, DirectoryService, RootToLeavesValidator, SimplePutter};
+use crate::directoryservice::DirectoryPutter;
+use crate::proto;
+use crate::B3Digest;
+use crate::Error;
+
+/// Asks near first; if not found, asks far.
+/// If found there, returns it and *inserts* it into
+/// near.
+/// Specifically, it always obtains the entire directory closure from far and inserts it into near,
+/// which is useful when far does not support accessing intermediate directories (but near does).
+/// There is no negative cache.
+/// Inserts and listings are not implemented for now.
+#[derive(Clone)]
+pub struct Cache<DS1, DS2> {
+    near: DS1,
+    far: DS2,
+}
+
+impl<DS1, DS2> Cache<DS1, DS2> {
+    pub fn new(near: DS1, far: DS2) -> Self {
+        Self { near, far }
+    }
+}
+
+#[async_trait]
+impl<DS1, DS2> DirectoryService for Cache<DS1, DS2>
+where
+    DS1: DirectoryService + Clone + 'static,
+    DS2: DirectoryService + Clone + 'static,
+{
+    #[instrument(skip(self, digest), fields(directory.digest = %digest))]
+    async fn get(&self, digest: &B3Digest) -> Result<Option<proto::Directory>, Error> {
+        match self.near.get(digest).await? {
+            Some(directory) => {
+                trace!("serving from cache");
+                Ok(Some(directory))
+            }
+            None => {
+                trace!("not found in near, asking remote…");
+
+                let mut copy = DirectoryGraph::with_order(
+                    RootToLeavesValidator::new_with_root_digest(digest.clone()),
+                );
+
+                let mut stream = self.far.get_recursive(digest);
+                let root = stream.try_next().await?;
+
+                if let Some(root) = root.clone() {
+                    copy.add(root)
+                        .map_err(|e| Error::StorageError(e.to_string()))?;
+                }
+
+                while let Some(dir) = stream.try_next().await? {
+                    copy.add(dir)
+                        .map_err(|e| Error::StorageError(e.to_string()))?;
+                }
+
+                let copy = copy
+                    .validate()
+                    .map_err(|e| Error::StorageError(e.to_string()))?;
+
+                let mut put = self.near.put_multiple_start();
+                for dir in copy.drain_leaves_to_root() {
+                    put.put(dir).await?;
+                }
+                put.close().await?;
+
+                Ok(root)
+            }
+        }
+    }
+
+    #[instrument(skip_all)]
+    async fn put(&self, _directory: proto::Directory) -> Result<B3Digest, Error> {
+        Err(Error::StorageError("unimplemented".to_string()))
+    }
+
+    #[instrument(skip_all, fields(directory.digest = %root_directory_digest))]
+    fn get_recursive(
+        &self,
+        root_directory_digest: &B3Digest,
+    ) -> BoxStream<'static, Result<proto::Directory, Error>> {
+        let near = self.near.clone();
+        let far = self.far.clone();
+        let digest = root_directory_digest.clone();
+        Box::pin(
+            (async move {
+                let mut stream = near.get_recursive(&digest);
+                match stream.try_next().await? {
+                    Some(first) => {
+                        trace!("serving from cache");
+                        Ok(futures::stream::once(async { Ok(first) })
+                            .chain(stream)
+                            .left_stream())
+                    }
+                    None => {
+                        trace!("not found in near, asking remote…");
+
+                        let mut copy_for_near = DirectoryGraph::with_order(
+                            RootToLeavesValidator::new_with_root_digest(digest.clone()),
+                        );
+                        let mut copy_for_client = vec![];
+
+                        let mut stream = far.get_recursive(&digest);
+                        while let Some(dir) = stream.try_next().await? {
+                            copy_for_near
+                                .add(dir.clone())
+                                .map_err(|e| Error::StorageError(e.to_string()))?;
+                            copy_for_client.push(dir);
+                        }
+
+                        let copy_for_near = copy_for_near
+                            .validate()
+                            .map_err(|e| Error::StorageError(e.to_string()))?;
+                        let mut put = near.put_multiple_start();
+                        for dir in copy_for_near.drain_leaves_to_root() {
+                            put.put(dir).await?;
+                        }
+                        put.close().await?;
+
+                        Ok(futures::stream::iter(copy_for_client.into_iter().map(Ok))
+                            .right_stream())
+                    }
+                }
+            })
+            .try_flatten_stream(),
+        )
+    }
+
+    #[instrument(skip_all)]
+    fn put_multiple_start(&self) -> Box<(dyn DirectoryPutter + 'static)> {
+        Box::new(SimplePutter::new((*self).clone()))
+    }
+}
diff --git a/tvix/castore/src/directoryservice/directory_graph.rs b/tvix/castore/src/directoryservice/directory_graph.rs
new file mode 100644
index 0000000000..e6b9b16337
--- /dev/null
+++ b/tvix/castore/src/directoryservice/directory_graph.rs
@@ -0,0 +1,413 @@
+use std::collections::HashMap;
+
+use bstr::ByteSlice;
+
+use petgraph::{
+    graph::{DiGraph, NodeIndex},
+    visit::{Bfs, DfsPostOrder, EdgeRef, IntoNodeIdentifiers, Walker},
+    Direction, Incoming,
+};
+use tracing::instrument;
+
+use super::order_validator::{LeavesToRootValidator, OrderValidator, RootToLeavesValidator};
+use crate::{
+    proto::{self, Directory, DirectoryNode},
+    B3Digest,
+};
+
+#[derive(thiserror::Error, Debug)]
+pub enum Error {
+    #[error("{0}")]
+    ValidationError(String),
+}
+
+/// This can be used to validate a Directory closure (a DAG of connected
+/// Directories), and to validate or re-establish its insertion order.
+///
+/// The DirectoryGraph is parametrized on the insertion order, and can be
+/// constructed using the Default trait, or using `with_order` if the
+/// OrderValidator needs to be customized.
+///
+/// If the user is receiving directories from canonical protobuf encoding in
+/// root-to-leaves order, and parsing them, she can call `digest_allowed`
+/// _before_ parsing the protobuf record and then add it with
+/// `add_order_unchecked`.
+/// All other users insert the directories via `add`, in their specified order.
+/// During insertion, we validate as much as we can at that time:
+///
+///  - individual validation of Directory messages
+///  - validation of insertion order
+///  - validation of size fields of referred Directories
+///
+/// Internally it keeps all received Directories in a directed graph,
+/// with node weights being the Directories and edges pointing from parents
+/// to children.
+///
+/// Once all Directories have been inserted, a validate function can be
+/// called to check graph connectivity and ensure there are no disconnected
+/// components or missing nodes.
+/// Finally, `drain_leaves_to_root` or `drain_root_to_leaves` can be chained
+/// onto validate to get an iterator over the (deduplicated and) validated
+/// list of directories in either order.
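+///
+/// A minimal usage sketch for the leaves-to-root case (`dir_a` and `dir_b`
+/// stand in for two Directory messages, where `dir_b` refers to `dir_a`):
+///
+/// ```no_run
+/// # use tvix_castore::directoryservice::{DirectoryGraph, LeavesToRootValidator};
+/// # use tvix_castore::proto::Directory;
+/// # fn sketch(dir_a: Directory, dir_b: Directory) {
+/// let mut graph = DirectoryGraph::<LeavesToRootValidator>::default();
+/// graph.add(dir_a).unwrap(); // leaves first…
+/// graph.add(dir_b).unwrap(); // …then the parent referring to them
+/// let dirs: Vec<Directory> = graph
+///     .validate()
+///     .unwrap()
+///     .drain_leaves_to_root()
+///     .collect();
+/// # }
+/// ```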
+#[derive(Default)]
+pub struct DirectoryGraph<O> {
+    // A directed graph, using Directory as node weight.
+    // Edges point from parents to children.
+    //
+    // Nodes with None weights might exist when a digest has been referred to but the directory
+    // with this digest has not yet been sent.
+    //
+    // The option in the edge weight tracks the pending validation state of the respective edge, for example if
+    // the child has not been added yet.
+    graph: DiGraph<Option<Directory>, Option<DirectoryNode>>,
+
+    // A lookup table from directory digest to node index.
+    digest_to_node_ix: HashMap<B3Digest, NodeIndex>,
+
+    order_validator: O,
+}
+
+pub struct ValidatedDirectoryGraph {
+    graph: DiGraph<Option<Directory>, Option<DirectoryNode>>,
+
+    root: Option<NodeIndex>,
+}
+
+fn check_edge(dir: &DirectoryNode, child: &Directory) -> Result<(), Error> {
+    // Ensure the size specified in the child node matches our records.
+    if dir.size != child.size() {
+        return Err(Error::ValidationError(format!(
+            "'{}' has wrong size, specified {}, recorded {}",
+            dir.name.as_bstr(),
+            dir.size,
+            child.size(),
+        )));
+    }
+    Ok(())
+}
+
+impl DirectoryGraph<LeavesToRootValidator> {
+    /// Insert a new Directory into the closure
+    #[instrument(level = "trace", skip_all, fields(directory.digest=%directory.digest(), directory.size=%directory.size()), err)]
+    pub fn add(&mut self, directory: proto::Directory) -> Result<(), Error> {
+        if !self.order_validator.add_directory(&directory) {
+            return Err(Error::ValidationError(
+                "unknown directory was referenced".into(),
+            ));
+        }
+        self.add_order_unchecked(directory)
+    }
+}
+
+impl DirectoryGraph<RootToLeavesValidator> {
+    /// If the user is parsing directories from canonical protobuf encoding, she can
+    /// call `digest_allowed` _before_ parsing the protobuf record and then add it
+    /// with `add_order_unchecked`.
+    pub fn digest_allowed(&self, digest: B3Digest) -> bool {
+        self.order_validator.digest_allowed(&digest)
+    }
+
+    /// Insert a new Directory into the closure
+    #[instrument(level = "trace", skip_all, fields(directory.digest=%directory.digest(), directory.size=%directory.size()), err)]
+    pub fn add(&mut self, directory: proto::Directory) -> Result<(), Error> {
+        let digest = directory.digest();
+        if !self.order_validator.digest_allowed(&digest) {
+            return Err(Error::ValidationError("unexpected digest".into()));
+        }
+        self.order_validator.add_directory_unchecked(&directory);
+        self.add_order_unchecked(directory)
+    }
+}
+
+impl<O: OrderValidator> DirectoryGraph<O> {
+    /// Customize the ordering, e.g. for pre-setting the root of the RootToLeavesValidator
+    pub fn with_order(order_validator: O) -> Self {
+        Self {
+            graph: Default::default(),
+            digest_to_node_ix: Default::default(),
+            order_validator,
+        }
+    }
+
+    /// Adds a directory to the graph, assuming its insertion order has already been checked
+    pub fn add_order_unchecked(&mut self, directory: proto::Directory) -> Result<(), Error> {
+        // Do some basic validation
+        directory
+            .validate()
+            .map_err(|e| Error::ValidationError(e.to_string()))?;
+
+        let digest = directory.digest();
+
+        // Teach the graph about the existence of a node with this digest
+        let ix = *self
+            .digest_to_node_ix
+            .entry(digest)
+            .or_insert_with(|| self.graph.add_node(None));
+
+        if self.graph[ix].is_some() {
+            // The node is already in the graph, there is nothing to do here.
+            return Ok(());
+        }
+
+        // set up edges to all child directories
+        for subdir in &directory.directories {
+            let subdir_digest: B3Digest = subdir.digest.clone().try_into().unwrap();
+
+            let child_ix = *self
+                .digest_to_node_ix
+                .entry(subdir_digest)
+                .or_insert_with(|| self.graph.add_node(None));
+
+            let pending_edge_check = match &self.graph[child_ix] {
+                Some(child) => {
+                    // child is already available, validate the edge now
+                    check_edge(subdir, child)?;
+                    None
+                }
+                None => Some(subdir.clone()), // pending validation
+            };
+            self.graph.add_edge(ix, child_ix, pending_edge_check);
+        }
+
+        // validate the edges from parents to this node
+        // this collects edge ids in a Vec because there is no edges_directed_mut :'c
+        for edge_id in self
+            .graph
+            .edges_directed(ix, Direction::Incoming)
+            .map(|edge_ref| edge_ref.id())
+            .collect::<Vec<_>>()
+            .into_iter()
+        {
+            let edge_weight = self
+                .graph
+                .edge_weight_mut(edge_id)
+                .expect("edge not found")
+                .take()
+                .expect("edge is already validated");
+            check_edge(&edge_weight, &directory)?;
+        }
+
+        // finally, store the directory information in the node weight
+        self.graph[ix] = Some(directory);
+
+        Ok(())
+    }
+
+    #[instrument(level = "trace", skip_all, err)]
+    pub fn validate(self) -> Result<ValidatedDirectoryGraph, Error> {
+        // find all initial nodes (nodes without incoming edges)
+        let mut roots = self
+            .graph
+            .node_identifiers()
+            .filter(|&a| self.graph.neighbors_directed(a, Incoming).next().is_none());
+
+        let root = roots.next();
+        if roots.next().is_some() {
+            return Err(Error::ValidationError(
+                "graph has disconnected roots".into(),
+            ));
+        }
+
+        // test that the graph is complete
+        if self.graph.raw_nodes().iter().any(|n| n.weight.is_none()) {
+            return Err(Error::ValidationError("graph is incomplete".into()));
+        }
+
+        Ok(ValidatedDirectoryGraph {
+            graph: self.graph,
+            root,
+        })
+    }
+}
+
+impl ValidatedDirectoryGraph {
+    /// Returns the list of directories in from-root-to-leaves order.
+    /// In case no elements have been inserted, returns an empty list.
+    ///
+    /// Panics if the specified root is not in the graph.
+    #[instrument(level = "trace", skip_all)]
+    pub fn drain_root_to_leaves(self) -> impl Iterator<Item = Directory> {
+        let order = match self.root {
+            Some(root) => {
+                // do a BFS traversal of the graph, starting with the root node
+                Bfs::new(&self.graph, root)
+                    .iter(&self.graph)
+                    .collect::<Vec<_>>()
+            }
+            None => vec![], // No nodes have been inserted, do not traverse
+        };
+
+        let (mut nodes, _edges) = self.graph.into_nodes_edges();
+
+        order
+            .into_iter()
+            .filter_map(move |i| nodes[i.index()].weight.take())
+    }
+
+    /// Returns the list of directories in from-leaves-to-root order.
+    /// In case no elements have been inserted, returns an empty list.
+    ///
+    /// Panics if the specified root is not in the graph.
+    #[instrument(level = "trace", skip_all)]
+    pub fn drain_leaves_to_root(self) -> impl Iterator<Item = Directory> {
+        let order = match self.root {
+            Some(root) => {
+                // do a DFS Post-Order traversal of the graph, starting with the root node
+                DfsPostOrder::new(&self.graph, root)
+                    .iter(&self.graph)
+                    .collect::<Vec<_>>()
+            }
+            None => vec![], // No nodes have been inserted, do not traverse
+        };
+
+        let (mut nodes, _edges) = self.graph.into_nodes_edges();
+
+        order
+            .into_iter()
+            .filter_map(move |i| nodes[i.index()].weight.take())
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use crate::{
+        fixtures::{DIRECTORY_A, DIRECTORY_B, DIRECTORY_C},
+        proto::{self, Directory},
+    };
+    use lazy_static::lazy_static;
+    use rstest::rstest;
+
+    lazy_static! {
+        pub static ref BROKEN_DIRECTORY: Directory = Directory {
+            symlinks: vec![proto::SymlinkNode {
+                name: "".into(), // invalid name!
+                target: "doesntmatter".into(),
+            }],
+            ..Default::default()
+        };
+
+        pub static ref BROKEN_PARENT_DIRECTORY: Directory = Directory {
+            directories: vec![proto::DirectoryNode {
+                name: "foo".into(),
+                digest: DIRECTORY_A.digest().into(),
+                size: DIRECTORY_A.size() + 42, // wrong!
+            }],
+            ..Default::default()
+        };
+    }
+
+    use super::{DirectoryGraph, LeavesToRootValidator, RootToLeavesValidator};
+
+    #[rstest]
+    /// Uploading an empty directory should succeed.
+    #[case::empty_directory(&[&*DIRECTORY_A], false, Some(vec![&*DIRECTORY_A]))]
+    /// Uploading A, then B (referring to A) should succeed.
+    #[case::simple_closure(&[&*DIRECTORY_A, &*DIRECTORY_B], false, Some(vec![&*DIRECTORY_A, &*DIRECTORY_B]))]
+    /// Uploading A, then A, then C (referring to A twice) should succeed.
+    /// We pretend to be a dumb client not deduping directories.
+    #[case::same_child(&[&*DIRECTORY_A, &*DIRECTORY_A, &*DIRECTORY_C], false, Some(vec![&*DIRECTORY_A, &*DIRECTORY_C]))]
+    /// Uploading A, then C (referring to A twice) should succeed.
+    #[case::same_child_dedup(&[&*DIRECTORY_A, &*DIRECTORY_C], false, Some(vec![&*DIRECTORY_A, &*DIRECTORY_C]))]
+    /// Uploading A, then C (referring to A twice), then B (itself referring to A) should fail during close,
+    /// as B itself would be left unconnected.
+    #[case::unconnected_node(&[&*DIRECTORY_A, &*DIRECTORY_C, &*DIRECTORY_B], false, None)]
+    /// Uploading B (referring to A) should fail immediately, because A was never uploaded.
+    #[case::dangling_pointer(&[&*DIRECTORY_B], true, None)]
+    /// Uploading a directory failing validation should fail immediately.
+    #[case::failing_validation(&[&*BROKEN_DIRECTORY], true, None)]
+    /// Uploading a directory which refers to another Directory with a wrong size should fail.
+    #[case::wrong_size_in_parent(&[&*DIRECTORY_A, &*BROKEN_PARENT_DIRECTORY], true, None)]
+    fn test_uploads(
+        #[case] directories_to_upload: &[&Directory],
+        #[case] exp_fail_upload_last: bool,
+        #[case] exp_finalize: Option<Vec<&Directory>>, // Some(_) if finalize successful, None if not.
+    ) {
+        let mut dcv = DirectoryGraph::<LeavesToRootValidator>::default();
+        let len_directories_to_upload = directories_to_upload.len();
+
+        for (i, d) in directories_to_upload.iter().enumerate() {
+            let resp = dcv.add((*d).clone());
+            if i == len_directories_to_upload - 1 && exp_fail_upload_last {
+                assert!(resp.is_err(), "expect last put to fail");
+
+                // We don't really care anymore what finalize() would return, as
+                // the add() failed.
+                return;
+            } else {
+                assert!(resp.is_ok(), "expect put to succeed");
+            }
+        }
+
+        // everything was uploaded successfully. Test finalize().
+        let resp = dcv
+            .validate()
+            .map(|validated| validated.drain_leaves_to_root().collect::<Vec<_>>());
+
+        match exp_finalize {
+            Some(directories) => {
+                assert_eq!(
+                    Vec::from_iter(directories.iter().map(|e| (*e).to_owned())),
+                    resp.expect("drain should succeed")
+                );
+            }
+            None => {
+                resp.expect_err("drain should fail");
+            }
+        }
+    }
+
+    #[rstest]
+    /// Downloading an empty directory should succeed.
+    #[case::empty_directory(&*DIRECTORY_A, &[&*DIRECTORY_A], false, Some(vec![&*DIRECTORY_A]))]
+    /// Downloading B, then A (referenced by B) should succeed.
+    #[case::simple_closure(&*DIRECTORY_B, &[&*DIRECTORY_B, &*DIRECTORY_A], false, Some(vec![&*DIRECTORY_A, &*DIRECTORY_B]))]
+    /// Downloading C (referring to A twice), then A should succeed.
+    #[case::same_child_dedup(&*DIRECTORY_C, &[&*DIRECTORY_C, &*DIRECTORY_A], false, Some(vec![&*DIRECTORY_A, &*DIRECTORY_C]))]
+    /// Downloading C, then B (both referring to A but not referring to each other) should fail immediately, as B has no connection to C (the root).
+    #[case::unconnected_node(&*DIRECTORY_C, &[&*DIRECTORY_C, &*DIRECTORY_B], true, None)]
+    /// Downloading B (specified as the root) but receiving A instead should fail immediately, because A has no connection to B (the root).
+    #[case::dangling_pointer(&*DIRECTORY_B, &[&*DIRECTORY_A], true, None)]
+    /// Downloading a directory failing validation should fail immediately.
+    #[case::failing_validation(&*BROKEN_DIRECTORY, &[&*BROKEN_DIRECTORY], true, None)]
+    /// Downloading a directory which refers to another Directory with a wrong size should fail.
+    #[case::wrong_size_in_parent(&*BROKEN_PARENT_DIRECTORY, &[&*BROKEN_PARENT_DIRECTORY, &*DIRECTORY_A], true, None)]
+    fn test_downloads(
+        #[case] root: &Directory,
+        #[case] directories_to_upload: &[&Directory],
+        #[case] exp_fail_upload_last: bool,
+        #[case] exp_finalize: Option<Vec<&Directory>>, // Some(_) if finalize successful, None if not.
+    ) {
+        let mut dcv =
+            DirectoryGraph::with_order(RootToLeavesValidator::new_with_root_digest(root.digest()));
+        let len_directories_to_upload = directories_to_upload.len();
+
+        for (i, d) in directories_to_upload.iter().enumerate() {
+            let resp = dcv.add((*d).clone());
+            if i == len_directories_to_upload - 1 && exp_fail_upload_last {
+                assert!(resp.is_err(), "expect last put to fail");
+
+                // We don't really care anymore what finalize() would return, as
+                // the add() failed.
+                return;
+            } else {
+                assert!(resp.is_ok(), "expect put to succeed");
+            }
+        }
+
+        // everything was uploaded successfully. Test finalize().
+        let resp = dcv
+            .validate()
+            .map(|validated| validated.drain_leaves_to_root().collect::<Vec<_>>());
+
+        match exp_finalize {
+            Some(directories) => {
+                assert_eq!(
+                    Vec::from_iter(directories.iter().map(|e| (*e).to_owned())),
+                    resp.expect("drain should succeed")
+                );
+            }
+            None => {
+                resp.expect_err("drain should fail");
+            }
+        }
+    }
+}
diff --git a/tvix/castore/src/directoryservice/from_addr.rs b/tvix/castore/src/directoryservice/from_addr.rs
index ae51df6376..9aa01df171 100644
--- a/tvix/castore/src/directoryservice/from_addr.rs
+++ b/tvix/castore/src/directoryservice/from_addr.rs
@@ -2,7 +2,10 @@ use url::Url;
 
 use crate::{proto::directory_service_client::DirectoryServiceClient, Error};
 
-use super::{DirectoryService, GRPCDirectoryService, MemoryDirectoryService, SledDirectoryService};
+use super::{
+    DirectoryService, GRPCDirectoryService, MemoryDirectoryService, ObjectStoreDirectoryService,
+    SledDirectoryService,
+};
 
 /// Constructs a new instance of a [DirectoryService] from an URI.
 ///
@@ -60,8 +63,24 @@ pub async fn from_addr(uri: &str) -> Result<Box<dyn DirectoryService>, crate::Er
             // - In the case of unix sockets, there must be a path, but may not be a host.
             // - In the case of non-unix sockets, there must be a host, but no path.
             // Constructing the channel is handled by tvix_castore::channel::from_url.
-            let client = DirectoryServiceClient::new(crate::tonic::channel_from_url(&url).await?);
-            Box::new(GRPCDirectoryService::from_client(client))
+            Box::new(GRPCDirectoryService::from_client(
+                DirectoryServiceClient::with_interceptor(
+                    crate::tonic::channel_from_url(&url).await?,
+                    tvix_tracing::propagate::tonic::send_trace,
+                ),
+            ))
+        }
+        scheme if scheme.starts_with("objectstore+") => {
+            // We need to convert the URL to string, strip the prefix there, and then
+            // parse it back as url, as Url::set_scheme() rejects some of the transitions we want to do.
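+            // E.g. "objectstore+file:///some/path" is passed on to the object
+            // store layer as "file:///some/path".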
+            let trimmed_url = {
+                let s = url.to_string();
+                Url::parse(s.strip_prefix("objectstore+").unwrap()).unwrap()
+            };
+            Box::new(
+                ObjectStoreDirectoryService::parse_url(&trimmed_url)
+                    .map_err(|e| Error::StorageError(e.to_string()))?,
+            )
         }
         #[cfg(feature = "cloud")]
         "bigtable" => {
diff --git a/tvix/castore/src/directoryservice/grpc.rs b/tvix/castore/src/directoryservice/grpc.rs
index 7402fe1b56..ca9b0de07b 100644
--- a/tvix/castore/src/directoryservice/grpc.rs
+++ b/tvix/castore/src/directoryservice/grpc.rs
@@ -9,31 +9,35 @@ use tokio::spawn;
 use tokio::sync::mpsc::UnboundedSender;
 use tokio::task::JoinHandle;
 use tokio_stream::wrappers::UnboundedReceiverStream;
-use tonic::async_trait;
-use tonic::Code;
-use tonic::{transport::Channel, Status};
-use tracing::{instrument, warn};
+use tonic::{async_trait, Code, Status};
+use tracing::{instrument, warn, Instrument as _};
 
 /// Connects to a (remote) tvix-store DirectoryService over gRPC.
 #[derive(Clone)]
-pub struct GRPCDirectoryService {
+pub struct GRPCDirectoryService<T> {
     /// The internal reference to a gRPC client.
     /// Cloning it is cheap, and it internally handles concurrent requests.
-    grpc_client: proto::directory_service_client::DirectoryServiceClient<Channel>,
+    grpc_client: proto::directory_service_client::DirectoryServiceClient<T>,
 }
 
-impl GRPCDirectoryService {
+impl<T> GRPCDirectoryService<T> {
     /// construct a [GRPCDirectoryService] from a [proto::directory_service_client::DirectoryServiceClient].
     /// panics if called outside the context of a tokio runtime.
     pub fn from_client(
-        grpc_client: proto::directory_service_client::DirectoryServiceClient<Channel>,
+        grpc_client: proto::directory_service_client::DirectoryServiceClient<T>,
     ) -> Self {
         Self { grpc_client }
     }
 }
 
 #[async_trait]
-impl DirectoryService for GRPCDirectoryService {
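+// The trait bounds mirror the ones tonic generates for its service clients,
+// so this impl accepts both a plain transport Channel and interceptored
+// services such as the trace-propagating client constructed in from_addr.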
+impl<T> DirectoryService for GRPCDirectoryService<T>
+where
+    T: tonic::client::GrpcService<tonic::body::BoxBody> + Send + Sync + Clone + 'static,
+    T::ResponseBody: tonic::codegen::Body<Data = tonic::codegen::Bytes> + Send + 'static,
+    <T::ResponseBody as tonic::codegen::Body>::Error: Into<tonic::codegen::StdError> + Send,
+    T::Future: Send,
+{
     #[instrument(level = "trace", skip_all, fields(directory.digest = %digest))]
     async fn get(
         &self,
@@ -107,7 +111,7 @@ impl DirectoryService for GRPCDirectoryService {
     fn get_recursive(
         &self,
         root_directory_digest: &B3Digest,
-    ) -> BoxStream<Result<proto::Directory, Error>> {
+    ) -> BoxStream<'static, Result<proto::Directory, Error>> {
         let mut grpc_client = self.grpc_client.clone();
         let root_directory_digest = root_directory_digest.clone();
 
@@ -194,14 +198,17 @@ impl DirectoryService for GRPCDirectoryService {
 
         let (tx, rx) = tokio::sync::mpsc::unbounded_channel();
 
-        let task: JoinHandle<Result<proto::PutDirectoryResponse, Status>> = spawn(async move {
-            let s = grpc_client
-                .put(UnboundedReceiverStream::new(rx))
-                .await?
-                .into_inner();
+        let task: JoinHandle<Result<proto::PutDirectoryResponse, Status>> = spawn(
+            async move {
+                let s = grpc_client
+                    .put(UnboundedReceiverStream::new(rx))
+                    .await?
+                    .into_inner();
 
-            Ok(s)
-        });
+                Ok(s)
+            } // instrument the task with the current span; this is not done by default
+            .in_current_span(),
+        );
 
         Box::new(GRPCPutter {
             rq: Some((task, tx)),
diff --git a/tvix/castore/src/directoryservice/memory.rs b/tvix/castore/src/directoryservice/memory.rs
index 3d1c5009be..3b2795c396 100644
--- a/tvix/castore/src/directoryservice/memory.rs
+++ b/tvix/castore/src/directoryservice/memory.rs
@@ -73,7 +73,7 @@ impl DirectoryService for MemoryDirectoryService {
     fn get_recursive(
         &self,
         root_directory_digest: &B3Digest,
-    ) -> BoxStream<Result<proto::Directory, Error>> {
+    ) -> BoxStream<'static, Result<proto::Directory, Error>> {
         traverse_directory(self.clone(), root_directory_digest)
     }
 
diff --git a/tvix/castore/src/directoryservice/mod.rs b/tvix/castore/src/directoryservice/mod.rs
index cf6bea39d8..eff4a685fa 100644
--- a/tvix/castore/src/directoryservice/mod.rs
+++ b/tvix/castore/src/directoryservice/mod.rs
@@ -2,10 +2,13 @@ use crate::{proto, B3Digest, Error};
 use futures::stream::BoxStream;
 use tonic::async_trait;
 
-mod closure_validator;
+mod combinators;
+mod directory_graph;
 mod from_addr;
 mod grpc;
 mod memory;
+mod object_store;
+mod order_validator;
 mod simple_putter;
 mod sled;
 #[cfg(test)]
@@ -13,10 +16,13 @@ pub mod tests;
 mod traverse;
 mod utils;
 
-pub use self::closure_validator::ClosureValidator;
+pub use self::combinators::Cache;
+pub use self::directory_graph::DirectoryGraph;
 pub use self::from_addr::from_addr;
 pub use self::grpc::GRPCDirectoryService;
 pub use self::memory::MemoryDirectoryService;
+pub use self::object_store::ObjectStoreDirectoryService;
+pub use self::order_validator::{LeavesToRootValidator, OrderValidator, RootToLeavesValidator};
 pub use self::simple_putter::SimplePutter;
 pub use self::sled::SledDirectoryService;
 pub use self::traverse::descend_to;
@@ -64,7 +70,7 @@ pub trait DirectoryService: Send + Sync {
     fn get_recursive(
         &self,
         root_directory_digest: &B3Digest,
-    ) -> BoxStream<Result<proto::Directory, Error>>;
+    ) -> BoxStream<'static, Result<proto::Directory, Error>>;
 
     /// Allows persisting a closure of [proto::Directory], which is a graph of
     /// connected Directory messages.
@@ -87,7 +93,7 @@ where
     fn get_recursive(
         &self,
         root_directory_digest: &B3Digest,
-    ) -> BoxStream<Result<proto::Directory, Error>> {
+    ) -> BoxStream<'static, Result<proto::Directory, Error>> {
         self.as_ref().get_recursive(root_directory_digest)
     }
 
diff --git a/tvix/castore/src/directoryservice/object_store.rs b/tvix/castore/src/directoryservice/object_store.rs
new file mode 100644
index 0000000000..feaaaa39cd
--- /dev/null
+++ b/tvix/castore/src/directoryservice/object_store.rs
@@ -0,0 +1,263 @@
+use std::sync::Arc;
+
+use data_encoding::HEXLOWER;
+use futures::future::Either;
+use futures::stream::BoxStream;
+use futures::SinkExt;
+use futures::StreamExt;
+use futures::TryFutureExt;
+use futures::TryStreamExt;
+use object_store::{path::Path, ObjectStore};
+use prost::Message;
+use tokio::io::AsyncWriteExt;
+use tokio_util::codec::LengthDelimitedCodec;
+use tonic::async_trait;
+use tracing::{instrument, trace, warn, Level};
+use url::Url;
+
+use super::{
+    DirectoryGraph, DirectoryPutter, DirectoryService, LeavesToRootValidator, RootToLeavesValidator,
+};
+use crate::{proto, B3Digest, Error};
+
+/// Stores directory closures in an object store.
+/// Notably, it relies on the option to reject direct access to child
+/// directories, only serving them as part of a recursive fetch via their
+/// top-level directory, since all batched writes (using `put_multiple_start`)
+/// are stored in a single object.
+/// Directories are stored in a length-delimited format, with a u32 length
+/// field and a frame limit of MAX_FRAME_LENGTH, in root-to-leaves topological
+/// order, the same way they will be returned to the client in get_recursive.
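+///
+/// The stored object itself is zstd-compressed (see
+/// ObjectStoreDirectoryPutter::close below for the write path).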
+#[derive(Clone)]
+pub struct ObjectStoreDirectoryService {
+    object_store: Arc<dyn ObjectStore>,
+    base_path: Path,
+}
+
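+/// Derives the object path for a directory closure, sharded by the first two
+/// digest bytes: `<base_path>/dirs/b3/<hex(digest[..2])>/<hex(digest)>`.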
+#[instrument(level = Level::TRACE, skip_all, fields(base_path = %base_path, directory.digest = %digest), ret(Display))]
+fn derive_dirs_path(base_path: &Path, digest: &B3Digest) -> Path {
+    base_path
+        .child("dirs")
+        .child("b3")
+        .child(HEXLOWER.encode(&digest.as_slice()[..2]))
+        .child(HEXLOWER.encode(digest.as_slice()))
+}
+
+#[allow(clippy::identity_op)]
+const MAX_FRAME_LENGTH: usize = 1 * 1024 * 1024 * 1000; // 1000 MiB
+
+impl ObjectStoreDirectoryService {
+    /// Constructs a new [ObjectStoreDirectoryService] from a [Url] supported
+    /// by [object_store].
+    /// Any path suffix becomes the base path of the object store.
+    /// Additional options, the same as in [object_store::parse_url_opts], can
+    /// be passed.
+    pub fn parse_url_opts<I, K, V>(url: &Url, options: I) -> Result<Self, object_store::Error>
+    where
+        I: IntoIterator<Item = (K, V)>,
+        K: AsRef<str>,
+        V: Into<String>,
+    {
+        let (object_store, path) = object_store::parse_url_opts(url, options)?;
+
+        Ok(Self {
+            object_store: Arc::new(object_store),
+            base_path: path,
+        })
+    }
+
+    /// Like [Self::parse_url_opts], except without the options.
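+    ///
+    /// A minimal sketch, using the in-memory backend of [object_store]:
+    ///
+    /// ```
+    /// # use url::Url;
+    /// # use tvix_castore::directoryservice::ObjectStoreDirectoryService;
+    /// let url = Url::parse("memory:///").unwrap();
+    /// let svc = ObjectStoreDirectoryService::parse_url(&url).unwrap();
+    /// ```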
+    pub fn parse_url(url: &Url) -> Result<Self, object_store::Error> {
+        Self::parse_url_opts(url, Vec::<(String, String)>::new())
+    }
+}
+
+#[async_trait]
+impl DirectoryService for ObjectStoreDirectoryService {
+    /// This involves the same steps as get_recursive anyway, so we just call
+    /// get_recursive, take the first element of the stream, and drop the rest.
+    #[instrument(skip(self, digest), fields(directory.digest = %digest))]
+    async fn get(&self, digest: &B3Digest) -> Result<Option<proto::Directory>, Error> {
+        self.get_recursive(digest).take(1).next().await.transpose()
+    }
+
+    #[instrument(skip(self, directory), fields(directory.digest = %directory.digest()))]
+    async fn put(&self, directory: proto::Directory) -> Result<B3Digest, Error> {
+        if !directory.directories.is_empty() {
+            return Err(Error::InvalidRequest(
+                    "only put_multiple_start is supported by the ObjectStoreDirectoryService for directories with children".into(),
+            ));
+        }
+
+        let mut handle = self.put_multiple_start();
+        handle.put(directory).await?;
+        handle.close().await
+    }
+
+    #[instrument(skip_all, fields(directory.digest = %root_directory_digest))]
+    fn get_recursive(
+        &self,
+        root_directory_digest: &B3Digest,
+    ) -> BoxStream<'static, Result<proto::Directory, Error>> {
+        // Check that we are not passing on bogus data from the object store to
+        // the client, and that the trust chain from the root digest to the
+        // leaves is intact.
+        let mut order_validator =
+            RootToLeavesValidator::new_with_root_digest(root_directory_digest.clone());
+
+        let dir_path = derive_dirs_path(&self.base_path, root_directory_digest);
+        let object_store = self.object_store.clone();
+
+        Box::pin(
+            (async move {
+                let stream = match object_store.get(&dir_path).await {
+                    Ok(v) => v.into_stream(),
+                    Err(object_store::Error::NotFound { .. }) => {
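+                        // The object being absent simply means this directory
+                        // closure is not in the store; surface that as an
+                        // empty stream.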
+                        return Ok(Either::Left(futures::stream::empty()))
+                    }
+                    Err(e) => return Err(std::io::Error::from(e).into()),
+                };
+
+                // get a reader of the response body.
+                let r = tokio_util::io::StreamReader::new(stream);
+                let decompressed_stream = async_compression::tokio::bufread::ZstdDecoder::new(r);
+
+                // the subdirectories are stored in a length delimited format
+                let delimited_stream = LengthDelimitedCodec::builder()
+                    .max_frame_length(MAX_FRAME_LENGTH)
+                    .length_field_type::<u32>()
+                    .new_read(decompressed_stream);
+
+                let dirs_stream = delimited_stream.map_err(Error::from).and_then(move |buf| {
+                    futures::future::ready((|| {
+                        let mut hasher = blake3::Hasher::new();
+                        let digest: B3Digest = hasher.update(&buf).finalize().as_bytes().into();
+
+                        // Only decode directory objects whose digests we trust
+                        if !order_validator.digest_allowed(&digest) {
+                            return Err(crate::Error::StorageError(format!(
+                                "received unexpected directory {}",
+                                digest
+                            )));
+                        }
+
+                        let directory = proto::Directory::decode(&*buf).map_err(|e| {
+                            warn!("unable to parse directory {}: {}", digest, e);
+                            Error::StorageError(e.to_string())
+                        })?;
+
+                        // Allow the children to appear next
+                        order_validator.add_directory_unchecked(&directory);
+
+                        Ok(directory)
+                    })())
+                });
+
+                Ok(Either::Right(dirs_stream))
+            })
+            .try_flatten_stream(),
+        )
+    }
+
+    #[instrument(skip_all)]
+    fn put_multiple_start(&self) -> Box<(dyn DirectoryPutter + 'static)>
+    where
+        Self: Clone,
+    {
+        Box::new(ObjectStoreDirectoryPutter::new(
+            self.object_store.clone(),
+            self.base_path.clone(),
+        ))
+    }
+}
+
+struct ObjectStoreDirectoryPutter {
+    object_store: Arc<dyn ObjectStore>,
+    base_path: Path,
+
+    directory_validator: Option<DirectoryGraph<LeavesToRootValidator>>,
+}
+
+impl ObjectStoreDirectoryPutter {
+    fn new(object_store: Arc<dyn ObjectStore>, base_path: Path) -> Self {
+        Self {
+            object_store,
+            base_path,
+            directory_validator: Some(Default::default()),
+        }
+    }
+}
+
+#[async_trait]
+impl DirectoryPutter for ObjectStoreDirectoryPutter {
+    #[instrument(level = "trace", skip_all, fields(directory.digest=%directory.digest()), err)]
+    async fn put(&mut self, directory: proto::Directory) -> Result<(), Error> {
+        match self.directory_validator {
+            None => return Err(Error::StorageError("already closed".to_string())),
+            Some(ref mut validator) => {
+                validator
+                    .add(directory)
+                    .map_err(|e| Error::StorageError(e.to_string()))?;
+            }
+        }
+
+        Ok(())
+    }
+
+    #[instrument(level = "trace", skip_all, ret, err)]
+    async fn close(&mut self) -> Result<B3Digest, Error> {
+        let validator = match self.directory_validator.take() {
+            None => return Err(Error::InvalidRequest("already closed".to_string())),
+            Some(validator) => validator,
+        };
+
+        // retrieve the validated directories.
+        // It is important that they are in topological order (root first),
+        // since that's how we want to retrieve them from the object store in the end.
+        let directories = validator
+            .validate()
+            .map_err(|e| Error::StorageError(e.to_string()))?
+            .drain_root_to_leaves()
+            .collect::<Vec<_>>();
+
+        // Get the root digest
+        let root_digest = directories
+            .first()
+            .ok_or_else(|| Error::InvalidRequest("got no directories".to_string()))?
+            .digest();
+
+        let dir_path = derive_dirs_path(&self.base_path, &root_digest);
+
+        match self.object_store.head(&dir_path).await {
+            // directory tree already exists, nothing to do
+            Ok(_) => {
+                trace!("directory tree already exists");
+            }
+
+            // directory tree does not yet exist, compress and upload.
+            Err(object_store::Error::NotFound { .. }) => {
+                trace!("uploading directory tree");
+
+                let object_store_writer =
+                    object_store::buffered::BufWriter::new(self.object_store.clone(), dir_path);
+                let compressed_writer =
+                    async_compression::tokio::write::ZstdEncoder::new(object_store_writer);
+                let mut directories_sink = LengthDelimitedCodec::builder()
+                    .max_frame_length(MAX_FRAME_LENGTH)
+                    .length_field_type::<u32>()
+                    .new_write(compressed_writer);
+
+                for directory in directories {
+                    directories_sink
+                        .send(directory.encode_to_vec().into())
+                        .await?;
+                }
+
+                let mut compressed_writer = directories_sink.into_inner();
+                compressed_writer.shutdown().await?;
+            }
+            // other error
+            Err(err) => Err(std::io::Error::from(err))?,
+        }
+
+        Ok(root_digest)
+    }
+}
diff --git a/tvix/castore/src/directoryservice/order_validator.rs b/tvix/castore/src/directoryservice/order_validator.rs
new file mode 100644
index 0000000000..6045f5d241
--- /dev/null
+++ b/tvix/castore/src/directoryservice/order_validator.rs
@@ -0,0 +1,181 @@
+use std::collections::HashSet;
+use tracing::warn;
+
+use crate::{proto::Directory, B3Digest};
+
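+/// Tracks which directories are allowed to appear next while a Directory
+/// closure is streamed in a particular order.
+///
+/// A minimal usage sketch (an empty `Directory` stands in for a real leaf):
+///
+/// ```
+/// # use tvix_castore::directoryservice::{LeavesToRootValidator, OrderValidator};
+/// # use tvix_castore::proto::Directory;
+/// let mut validator = LeavesToRootValidator::default();
+/// // an empty directory references nothing, so it is always accepted
+/// assert!(validator.add_directory(&Directory::default()));
+/// ```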
+pub trait OrderValidator {
+    /// Update the order validator's state with the directory
+    /// Returns whether the directory was accepted
+    fn add_directory(&mut self, directory: &Directory) -> bool;
+}
+
+#[derive(Default)]
+/// Validates that newly introduced directories are already referenced from
+/// the root via existing directories.
+/// Commonly used when _receiving_ a directory closure _from_ a store.
+pub struct RootToLeavesValidator {
+    /// Only used to remember the root node, not for validation
+    expected_digests: HashSet<B3Digest>,
+}
+
+impl RootToLeavesValidator {
+    /// Use to validate the root digest of the closure upon receiving the first
+    /// directory.
+    pub fn new_with_root_digest(root_digest: B3Digest) -> Self {
+        let mut this = Self::default();
+        this.expected_digests.insert(root_digest);
+        this
+    }
+
+    /// Checks if a directory is in-order based on its digest.
+    ///
+    /// Particularly useful when receiving directories in canonical protobuf
+    /// encoding, so that directories not connected to the root can be rejected
+    /// without parsing.
+    ///
+    /// After parsing, the directory must be passed to `add_directory_unchecked`
+    /// to add its children to the list of expected digests.
+    pub fn digest_allowed(&self, digest: &B3Digest) -> bool {
+        self.expected_digests.is_empty() // we don't know the root node; allow any
+            || self.expected_digests.contains(digest)
+    }
+
+    /// Updates the order validator's state with the directory.
+    pub fn add_directory_unchecked(&mut self, directory: &Directory) {
+        // No initial root was specified and this is the first directory
+        if self.expected_digests.is_empty() {
+            self.expected_digests.insert(directory.digest());
+        }
+
+        for subdir in &directory.directories {
+            // Allow the children to appear next
+            let subdir_digest = subdir.digest.clone().try_into().unwrap();
+            self.expected_digests.insert(subdir_digest);
+        }
+    }
+}
+
+impl OrderValidator for RootToLeavesValidator {
+    fn add_directory(&mut self, directory: &Directory) -> bool {
+        if !self.digest_allowed(&directory.digest()) {
+            return false;
+        }
+        self.add_directory_unchecked(directory);
+        true
+    }
+}
+
+#[derive(Default)]
+/// Validates that newly uploaded directories only reference directories which
+/// have already been introduced.
+/// Commonly used when _uploading_ a directory closure _to_ a store.
+pub struct LeavesToRootValidator {
+    /// This is empty in the beginning, and gets filled as leaves and intermediates are
+    /// inserted
+    allowed_references: HashSet<B3Digest>,
+}
+
+impl OrderValidator for LeavesToRootValidator {
+    fn add_directory(&mut self, directory: &Directory) -> bool {
+        let digest = directory.digest();
+
+        for subdir in &directory.directories {
+            let subdir_digest = subdir.digest.clone().try_into().unwrap(); // this has been validated in validate_directory()
+            if !self.allowed_references.contains(&subdir_digest) {
+                warn!(
+                    directory.digest = %digest,
+                    subdirectory.digest = %subdir_digest,
+                    "unexpected directory reference"
+                );
+                return false;
+            }
+        }
+
+        self.allowed_references.insert(digest.clone());
+
+        true
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::{LeavesToRootValidator, RootToLeavesValidator};
+    use crate::directoryservice::order_validator::OrderValidator;
+    use crate::fixtures::{DIRECTORY_A, DIRECTORY_B, DIRECTORY_C};
+    use crate::proto::Directory;
+    use rstest::rstest;
+
+    #[rstest]
+    /// Uploading an empty directory should succeed.
+    #[case::empty_directory(&[&*DIRECTORY_A], false)]
+    /// Uploading A, then B (referring to A) should succeed.
+    #[case::simple_closure(&[&*DIRECTORY_A, &*DIRECTORY_B], false)]
+    /// Uploading A, then A, then C (referring to A twice) should succeed.
+    /// We pretend to be a dumb client not deduping directories.
+    #[case::same_child(&[&*DIRECTORY_A, &*DIRECTORY_A, &*DIRECTORY_C], false)]
+    /// Uploading A, then C (referring to A twice) should succeed.
+    #[case::same_child_dedup(&[&*DIRECTORY_A, &*DIRECTORY_C], false)]
+    /// Uploading A, then C (referring to A twice), then B (itself referring to A) is accepted
+    /// by the order validator itself; the unconnected B is only caught by DirectoryGraph::validate().
+    #[case::unconnected_node(&[&*DIRECTORY_A, &*DIRECTORY_C, &*DIRECTORY_B], false)]
+    /// Uploading B (referring to A) should fail immediately, because A was never uploaded.
+    #[case::dangling_pointer(&[&*DIRECTORY_B], true)]
+    fn leaves_to_root(
+        #[case] directories_to_upload: &[&Directory],
+        #[case] exp_fail_upload_last: bool,
+    ) {
+        let mut validator = LeavesToRootValidator::default();
+        let len_directories_to_upload = directories_to_upload.len();
+
+        for (i, d) in directories_to_upload.iter().enumerate() {
+            let resp = validator.add_directory(d);
+            if i == len_directories_to_upload - 1 && exp_fail_upload_last {
+                assert!(!resp, "expect last put to fail");
+
+                // We don't really care anymore what finalize() would return, as
+                // the add() failed.
+                return;
+            } else {
+                assert!(resp, "expect put to succeed");
+            }
+        }
+    }
+
+    #[rstest]
+    /// Downloading an empty directory should succeed.
+    #[case::empty_directory(&*DIRECTORY_A, &[&*DIRECTORY_A], false)]
+    /// Downloading B, then A (referenced by B) should succeed.
+    #[case::simple_closure(&*DIRECTORY_B, &[&*DIRECTORY_B, &*DIRECTORY_A], false)]
+    /// Downloading C (referring to A twice), then A should succeed.
+    #[case::same_child_dedup(&*DIRECTORY_C, &[&*DIRECTORY_C, &*DIRECTORY_A], false)]
+    /// Downloading C, then B (both referring to A but not referring to each other) should fail immediately, as B has no connection to C (the root).
+    #[case::unconnected_node(&*DIRECTORY_C, &[&*DIRECTORY_C, &*DIRECTORY_B], true)]
+    /// Downloading B (specified as the root) but receiving A instead should fail immediately, because A has no connection to B (the root).
+    #[case::dangling_pointer(&*DIRECTORY_B, &[&*DIRECTORY_A], true)]
+    fn root_to_leaves(
+        #[case] root: &Directory,
+        #[case] directories_to_upload: &[&Directory],
+        #[case] exp_fail_upload_last: bool,
+    ) {
+        let mut validator = RootToLeavesValidator::new_with_root_digest(root.digest());
+        let len_directories_to_upload = directories_to_upload.len();
+
+        for (i, d) in directories_to_upload.iter().enumerate() {
+            let resp1 = validator.digest_allowed(&d.digest());
+            let resp = validator.add_directory(d);
+            assert_eq!(
+                resp1, resp,
+                "digest_allowed should return the same value as add_directory"
+            );
+            if i == len_directories_to_upload - 1 && exp_fail_upload_last {
+                assert!(!resp, "expect last put to fail");
+
+                // We don't really care anymore what finalize() would return, as
+                // the add() failed.
+                return;
+            } else {
+                assert!(resp, "expect put to succeed");
+            }
+        }
+    }
+}
diff --git a/tvix/castore/src/directoryservice/simple_putter.rs b/tvix/castore/src/directoryservice/simple_putter.rs
index 25617ebcac..dc54e3d11d 100644
--- a/tvix/castore/src/directoryservice/simple_putter.rs
+++ b/tvix/castore/src/directoryservice/simple_putter.rs
@@ -1,6 +1,6 @@
-use super::ClosureValidator;
 use super::DirectoryPutter;
 use super::DirectoryService;
+use super::{DirectoryGraph, LeavesToRootValidator};
 use crate::proto;
 use crate::B3Digest;
 use crate::Error;
@@ -14,7 +14,7 @@ use tracing::warn;
 pub struct SimplePutter<DS: DirectoryService> {
     directory_service: DS,
 
-    directory_validator: Option<ClosureValidator>,
+    directory_validator: Option<DirectoryGraph<LeavesToRootValidator>>,
 }
 
 impl<DS: DirectoryService> SimplePutter<DS> {
@@ -33,7 +33,9 @@ impl<DS: DirectoryService + 'static> DirectoryPutter for SimplePutter<DS> {
         match self.directory_validator {
             None => return Err(Error::StorageError("already closed".to_string())),
             Some(ref mut validator) => {
-                validator.add(directory)?;
+                validator
+                    .add(directory)
+                    .map_err(|e| Error::StorageError(e.to_string()))?;
             }
         }
 
@@ -46,7 +48,11 @@ impl<DS: DirectoryService + 'static> DirectoryPutter for SimplePutter<DS> {
             None => Err(Error::InvalidRequest("already closed".to_string())),
             Some(validator) => {
                 // retrieve the validated directories.
-                let directories = validator.finalize()?;
+                let directories = validator
+                    .validate()
+                    .map_err(|e| Error::StorageError(e.to_string()))?
+                    .drain_leaves_to_root()
+                    .collect::<Vec<_>>();
 
                 // Get the root digest, which is at the end (cf. insertion order)
                 let root_digest = directories
diff --git a/tvix/castore/src/directoryservice/sled.rs b/tvix/castore/src/directoryservice/sled.rs
index d531513e1e..bd98ed6b1e 100644
--- a/tvix/castore/src/directoryservice/sled.rs
+++ b/tvix/castore/src/directoryservice/sled.rs
@@ -8,7 +8,7 @@ use tonic::async_trait;
 use tracing::{instrument, warn};
 
 use super::utils::traverse_directory;
-use super::{ClosureValidator, DirectoryPutter, DirectoryService};
+use super::{DirectoryGraph, DirectoryPutter, DirectoryService, LeavesToRootValidator};
 
 #[derive(Clone)]
 pub struct SledDirectoryService {
@@ -112,7 +112,7 @@ impl DirectoryService for SledDirectoryService {
     fn get_recursive(
         &self,
         root_directory_digest: &B3Digest,
-    ) -> BoxStream<Result<proto::Directory, Error>> {
+    ) -> BoxStream<'static, Result<proto::Directory, Error>> {
         traverse_directory(self.clone(), root_directory_digest)
     }
 
@@ -135,7 +135,7 @@ pub struct SledDirectoryPutter {
 
     /// The directories (inside the directory validator) that we insert later,
     /// or None, if they were already inserted.
-    directory_validator: Option<ClosureValidator>,
+    directory_validator: Option<DirectoryGraph<LeavesToRootValidator>>,
 }
 
 #[async_trait]
@@ -145,7 +145,9 @@ impl DirectoryPutter for SledDirectoryPutter {
         match self.directory_validator {
             None => return Err(Error::StorageError("already closed".to_string())),
             Some(ref mut validator) => {
-                validator.add(directory)?;
+                validator
+                    .add(directory)
+                    .map_err(|e| Error::StorageError(e.to_string()))?;
             }
         }
 
@@ -162,7 +164,11 @@ impl DirectoryPutter for SledDirectoryPutter {
                     let tree = self.tree.clone();
                     move || {
                         // retrieve the validated directories.
-                        let directories = validator.finalize()?;
+                        let directories = validator
+                            .validate()
+                            .map_err(|e| Error::StorageError(e.to_string()))?
+                            .drain_leaves_to_root()
+                            .collect::<Vec<_>>();
 
                         // Get the root digest, which is at the end (cf. insertion order)
                         let root_digest = directories
diff --git a/tvix/castore/src/directoryservice/tests/mod.rs b/tvix/castore/src/directoryservice/tests/mod.rs
index 1b40d9feb0..cc3c5b788a 100644
--- a/tvix/castore/src/directoryservice/tests/mod.rs
+++ b/tvix/castore/src/directoryservice/tests/mod.rs
@@ -26,6 +26,7 @@ use self::utils::make_grpc_directory_service_client;
 #[case::grpc(make_grpc_directory_service_client().await)]
 #[case::memory(directoryservice::from_addr("memory://").await.unwrap())]
 #[case::sled(directoryservice::from_addr("sled://").await.unwrap())]
+#[case::objectstore(directoryservice::from_addr("objectstore+memory://").await.unwrap())]
 #[cfg_attr(all(feature = "cloud", feature = "integration"), case::bigtable(directoryservice::from_addr("bigtable://instance-1?project_id=project-1&table_name=table-1&family_name=cf1").await.unwrap()))]
 pub fn directory_services(#[case] directory_service: impl DirectoryService) {}
 
diff --git a/tvix/castore/src/directoryservice/traverse.rs b/tvix/castore/src/directoryservice/traverse.rs
index 8a668c868c..17a51ae2bb 100644
--- a/tvix/castore/src/directoryservice/traverse.rs
+++ b/tvix/castore/src/directoryservice/traverse.rs
@@ -1,5 +1,8 @@
 use super::DirectoryService;
-use crate::{proto::NamedNode, B3Digest, Error, Path};
+use crate::{
+    proto::{node::Node, NamedNode},
+    B3Digest, Error, Path,
+};
 use tracing::{instrument, warn};
 
 /// This descends from a (root) node to the given (sub)path, returning the Node
@@ -7,69 +10,55 @@ use tracing::{instrument, warn};
 #[instrument(skip(directory_service, path), fields(%path))]
 pub async fn descend_to<DS>(
     directory_service: DS,
-    root_node: crate::proto::node::Node,
+    root_node: Node,
     path: impl AsRef<Path> + std::fmt::Display,
-) -> Result<Option<crate::proto::node::Node>, Error>
+) -> Result<Option<Node>, Error>
 where
     DS: AsRef<dyn DirectoryService>,
 {
-    let mut cur_node = root_node;
-    let mut it = path.as_ref().components();
-
-    loop {
-        match it.next() {
-            None => {
-                // the (remaining) path is empty, return the node we're current at.
-                return Ok(Some(cur_node));
+    let mut parent_node = root_node;
+    for component in path.as_ref().components() {
+        match parent_node {
+            Node::File(_) | Node::Symlink(_) => {
+                // There's still some path left, but the parent node is no directory.
+                // This means the path doesn't exist, as we can't reach it.
+                return Ok(None);
             }
-            Some(first_component) => {
-                match cur_node {
-                    crate::proto::node::Node::File(_) | crate::proto::node::Node::Symlink(_) => {
-                        // There's still some path left, but the current node is no directory.
-                        // This means the path doesn't exist, as we can't reach it.
-                        return Ok(None);
-                    }
-                    crate::proto::node::Node::Directory(directory_node) => {
-                        let digest: B3Digest = directory_node.digest.try_into().map_err(|_e| {
-                            Error::StorageError("invalid digest length".to_string())
+            Node::Directory(directory_node) => {
+                let digest: B3Digest = directory_node
+                    .digest
+                    .try_into()
+                    .map_err(|_e| Error::StorageError("invalid digest length".to_string()))?;
+
+                // fetch the linked node from the directory_service.
+                let directory =
+                    directory_service
+                        .as_ref()
+                        .get(&digest)
+                        .await?
+                        .ok_or_else(|| {
+                            // If we didn't get the directory node that's linked, that's a store inconsistency, bail out!
+                            warn!("directory {} does not exist", digest);
+
+                            Error::StorageError(format!("directory {} does not exist", digest))
                         })?;
 
-                        // fetch the linked node from the directory_service
-                        match directory_service.as_ref().get(&digest).await? {
-                            // If we didn't get the directory node that's linked, that's a store inconsistency, bail out!
-                            None => {
-                                warn!("directory {} does not exist", digest);
-
-                                return Err(Error::StorageError(format!(
-                                    "directory {} does not exist",
-                                    digest
-                                )));
-                            }
-                            Some(directory) => {
-                                // look for first_component in the [Directory].
-                                // FUTUREWORK: as the nodes() iterator returns in a sorted fashion, we
-                                // could stop as soon as e.name is larger than the search string.
-                                let child_node =
-                                    directory.nodes().find(|n| n.get_name() == first_component);
-
-                                match child_node {
-                                    // child node not found means there's no such element inside the directory.
-                                    None => {
-                                        return Ok(None);
-                                    }
-                                    // child node found, return to top-of loop to find the next
-                                    // node in the path.
-                                    Some(child_node) => {
-                                        cur_node = child_node;
-                                    }
-                                }
-                            }
-                        }
-                    }
+                // look for the component in the [Directory].
+                // FUTUREWORK: as the nodes() iterator returns in a sorted fashion, we
+                // could stop as soon as e.name is larger than the search string.
+                if let Some(child_node) = directory.nodes().find(|n| n.get_name() == component) {
+                    // child node found, update parent_node to it and continue.
+                    parent_node = child_node;
+                } else {
+                    // child node not found means there's no such element inside the directory.
+                    return Ok(None);
                 }
             }
         }
     }
+
+    // We traversed the entire path, so this must be the node.
+    Ok(Some(parent_node))
 }
 
 #[cfg(test)]
diff --git a/tvix/castore/src/directoryservice/utils.rs b/tvix/castore/src/directoryservice/utils.rs
index 01c521076c..a0ba395ecd 100644
--- a/tvix/castore/src/directoryservice/utils.rs
+++ b/tvix/castore/src/directoryservice/utils.rs
@@ -2,14 +2,16 @@ use super::DirectoryService;
 use crate::proto;
 use crate::B3Digest;
 use crate::Error;
-use async_stream::stream;
+use async_stream::try_stream;
 use futures::stream::BoxStream;
 use std::collections::{HashSet, VecDeque};
+use tracing::instrument;
 use tracing::warn;
 
 /// Traverses a [proto::Directory] from the root to the children.
 ///
 /// This is mostly BFS, but directories are only returned once.
+#[instrument(skip(directory_service))]
 pub fn traverse_directory<'a, DS: DirectoryService + 'static>(
     directory_service: DS,
     root_directory_digest: &B3Digest,
@@ -23,60 +25,53 @@ pub fn traverse_directory<'a, DS: DirectoryService + 'static>(
     // We omit sending the same directories multiple times.
     let mut sent_directory_digests: HashSet<B3Digest> = HashSet::new();
 
-    let stream = stream! {
+    Box::pin(try_stream! {
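+        // Inside try_stream!, `?` yields the error as the stream's final item
+        // instead of returning from an enclosing function.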
         while let Some(current_directory_digest) = worklist_directory_digests.pop_front() {
-            match directory_service.get(&current_directory_digest).await {
+            let current_directory = directory_service.get(&current_directory_digest).await.map_err(|e| {
+                warn!("failed to look up directory");
+                Error::StorageError(format!(
+                    "unable to look up directory {}: {}",
+                    current_directory_digest, e
+                ))
+            })?.ok_or_else(|| {
                 // if it's not there, we have an inconsistent store!
-                Ok(None) => {
-                    warn!("directory {} does not exist", current_directory_digest);
-                    yield Err(Error::StorageError(format!(
-                        "directory {} does not exist",
-                        current_directory_digest
-                    )));
-                }
-                Err(e) => {
-                    warn!("failed to look up directory");
-                    yield Err(Error::StorageError(format!(
-                        "unable to look up directory {}: {}",
-                        current_directory_digest, e
-                    )));
-                }
+                warn!("directory {} does not exist", current_directory_digest);
+                Error::StorageError(format!(
+                    "directory {} does not exist",
+                    current_directory_digest
+                ))
 
-                // if we got it
-                Ok(Some(current_directory)) => {
-                    // validate, we don't want to send invalid directories.
-                    if let Err(e) = current_directory.validate() {
-                        warn!("directory failed validation: {}", e.to_string());
-                        yield Err(Error::StorageError(format!(
-                            "invalid directory: {}",
-                            current_directory_digest
-                        )));
-                    }
+            })?;
 
-                    // We're about to send this directory, so let's avoid sending it again if a
-                    // descendant has it.
-                    sent_directory_digests.insert(current_directory_digest);
+            // validate; we don't want to send invalid directories.
+            current_directory.validate().map_err(|e| {
+               warn!("directory failed validation: {}", e.to_string());
+               Error::StorageError(format!(
+                   "invalid directory: {}",
+                   current_directory_digest
+               ))
+            })?;
 
-                    // enqueue all child directory digests to the work queue, as
-                    // long as they're not part of the worklist or already sent.
-                    // This panics if the digest looks invalid, it's supposed to be checked first.
-                    for child_directory_node in &current_directory.directories {
-                        // TODO: propagate error
-                        let child_digest: B3Digest = child_directory_node.digest.clone().try_into().unwrap();
+            // We're about to send this directory, so let's avoid sending it again if a
+            // descendant has it.
+            sent_directory_digests.insert(current_directory_digest);
 
-                        if worklist_directory_digests.contains(&child_digest)
-                            || sent_directory_digests.contains(&child_digest)
-                        {
-                            continue;
-                        }
-                        worklist_directory_digests.push_back(child_digest);
-                    }
+            // enqueue all child directory digests to the work queue, as
+            // long as they're not part of the worklist or already sent.
+            // This panics if the digest looks invalid; it's supposed to be checked first.
+            for child_directory_node in &current_directory.directories {
+                // TODO: propagate error
+                let child_digest: B3Digest = child_directory_node.digest.clone().try_into().unwrap();
 
-                    yield Ok(current_directory);
+                if worklist_directory_digests.contains(&child_digest)
+                    || sent_directory_digests.contains(&child_digest)
+                {
+                    continue;
                 }
-            };
-        }
-    };
+                worklist_directory_digests.push_back(child_digest);
+            }
 
-    Box::pin(stream)
+            yield current_directory;
+        }
+    })
 }
diff --git a/tvix/castore/src/fs/fuse.rs b/tvix/castore/src/fs/fuse/mod.rs
index cd50618ff5..64ef29ed2a 100644
--- a/tvix/castore/src/fs/fuse.rs
+++ b/tvix/castore/src/fs/fuse/mod.rs
@@ -1,8 +1,13 @@
-use std::{io, path::Path, sync::Arc, thread};
+use std::{io, path::Path, sync::Arc};
 
 use fuse_backend_rs::{api::filesystem::FileSystem, transport::FuseSession};
+use parking_lot::Mutex;
+use threadpool::ThreadPool;
 use tracing::{error, instrument};
 
+#[cfg(test)]
+mod tests;
+
 struct FuseServer<FS>
 where
     FS: FileSystem + Sync + Send,
@@ -46,9 +51,12 @@ where
     }
 }
 
+/// Starts a [FileSystem] with the specified number of threads, and provides
+/// functions to unmount it and wait for it to complete.
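+///
+/// A minimal usage sketch (the `fs` filesystem value and the mountpoint are
+/// assumptions, for illustration):
+///
+/// ```ignore
+/// let daemon = FuseDaemon::new(fs, "/mnt/example", 4, false)?;
+/// // ... the filesystem is now served by 4 worker threads ...
+/// daemon.unmount()?; // sends the umount command and joins the workers
+/// ```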
+#[derive(Clone)]
 pub struct FuseDaemon {
-    session: FuseSession,
-    threads: Vec<thread::JoinHandle<()>>,
+    session: Arc<Mutex<FuseSession>>,
+    threads: Arc<ThreadPool>,
 }
 
 impl FuseDaemon {
@@ -56,7 +64,7 @@ impl FuseDaemon {
     pub fn new<FS, P>(
         fs: FS,
         mountpoint: P,
-        threads: usize,
+        num_threads: usize,
         allow_other: bool,
     ) -> Result<Self, io::Error>
     where
@@ -73,40 +81,49 @@ impl FuseDaemon {
         session
             .mount()
             .map_err(|e| io::Error::new(io::ErrorKind::Other, e.to_string()))?;
-        let mut join_handles = Vec::with_capacity(threads);
-        for _ in 0..threads {
+
+        // construct a thread pool
+        let threads = threadpool::Builder::new()
+            .num_threads(num_threads)
+            .thread_name("fuse_server".to_string())
+            .build();
+
+        for _ in 0..num_threads {
+            // for each thread requested, create and start a FuseServer accepting requests.
             let mut server = FuseServer {
                 server: server.clone(),
                 channel: session
                     .new_channel()
                     .map_err(|e| io::Error::new(io::ErrorKind::Other, e.to_string()))?,
             };
-            let join_handle = thread::Builder::new()
-                .name("fuse_server".to_string())
-                .spawn(move || {
-                    let _ = server.start();
-                })?;
-            join_handles.push(join_handle);
+
+            threads.execute(move || {
+                let _ = server.start();
+            });
         }
 
         Ok(FuseDaemon {
-            session,
-            threads: join_handles,
+            session: Arc::new(Mutex::new(session)),
+            threads: Arc::new(threads),
         })
     }
 
+    /// Waits for all threads to finish.
+    #[instrument(skip_all)]
+    pub fn wait(&self) {
+        self.threads.join()
+    }
+
+    /// Sends the unmount command and waits for all threads to finish.
     #[instrument(skip_all, err)]
-    pub fn unmount(&mut self) -> Result<(), io::Error> {
+    pub fn unmount(&self) -> Result<(), io::Error> {
+        // Send the unmount command.
         self.session
+            .lock()
             .umount()
             .map_err(|e| io::Error::new(io::ErrorKind::Other, e.to_string()))?;
 
-        for thread in self.threads.drain(..) {
-            thread.join().map_err(|_| {
-                io::Error::new(io::ErrorKind::Other, "failed to join fuse server thread")
-            })?;
-        }
-
+        self.wait();
         Ok(())
     }
 }
diff --git a/tvix/castore/src/fs/tests.rs b/tvix/castore/src/fs/fuse/tests.rs
index d6eeb8a411..bcebcf4a72 100644
--- a/tvix/castore/src/fs/tests.rs
+++ b/tvix/castore/src/fs/fuse/tests.rs
@@ -11,7 +11,8 @@ use std::{
 use tempfile::TempDir;
 use tokio_stream::{wrappers::ReadDirStream, StreamExt};
 
-use super::{fuse::FuseDaemon, TvixStoreFs};
+use super::FuseDaemon;
+use crate::fs::{TvixStoreFs, XATTR_NAME_BLOB_DIGEST, XATTR_NAME_DIRECTORY_DIGEST};
 use crate::proto as castorepb;
 use crate::proto::node::Node;
 use crate::{
@@ -247,7 +248,7 @@ async fn mount() {
 
     let (blob_service, directory_service) = gen_svcs();
 
-    let mut fuse_daemon = do_mount(
+    let fuse_daemon = do_mount(
         blob_service,
         directory_service,
         BTreeMap::default(),
@@ -270,7 +271,7 @@ async fn root() {
     let tmpdir = TempDir::new().unwrap();
 
     let (blob_service, directory_service) = gen_svcs();
-    let mut fuse_daemon = do_mount(
+    let fuse_daemon = do_mount(
         blob_service,
         directory_service,
         BTreeMap::default(),
@@ -304,7 +305,7 @@ async fn root_with_listing() {
 
     populate_blob_a(&blob_service, &mut root_nodes).await;
 
-    let mut fuse_daemon = do_mount(
+    let fuse_daemon = do_mount(
         blob_service,
         directory_service,
         root_nodes,
@@ -348,7 +349,7 @@ async fn stat_file_at_root() {
 
     populate_blob_a(&blob_service, &mut root_nodes).await;
 
-    let mut fuse_daemon = do_mount(
+    let fuse_daemon = do_mount(
         blob_service,
         directory_service,
         root_nodes,
@@ -385,7 +386,7 @@ async fn read_file_at_root() {
 
     populate_blob_a(&blob_service, &mut root_nodes).await;
 
-    let mut fuse_daemon = do_mount(
+    let fuse_daemon = do_mount(
         blob_service,
         directory_service,
         root_nodes,
@@ -422,7 +423,7 @@ async fn read_large_file_at_root() {
 
     populate_blob_b(&blob_service, &mut root_nodes).await;
 
-    let mut fuse_daemon = do_mount(
+    let fuse_daemon = do_mount(
         blob_service,
         directory_service,
         root_nodes,
@@ -467,7 +468,7 @@ async fn symlink_readlink() {
 
     populate_symlink(&mut root_nodes).await;
 
-    let mut fuse_daemon = do_mount(
+    let fuse_daemon = do_mount(
         blob_service,
         directory_service,
         root_nodes,
@@ -514,7 +515,7 @@ async fn read_stat_through_symlink() {
     populate_blob_a(&blob_service, &mut root_nodes).await;
     populate_symlink(&mut root_nodes).await;
 
-    let mut fuse_daemon = do_mount(
+    let fuse_daemon = do_mount(
         blob_service,
         directory_service,
         root_nodes,
@@ -559,7 +560,7 @@ async fn read_stat_directory() {
 
     populate_directory_with_keep(&blob_service, &directory_service, &mut root_nodes).await;
 
-    let mut fuse_daemon = do_mount(
+    let fuse_daemon = do_mount(
         blob_service,
         directory_service,
         root_nodes,
@@ -596,7 +597,7 @@ async fn xattr() {
     populate_directory_with_keep(&blob_service, &directory_service, &mut root_nodes).await;
     populate_blob_a(&blob_service, &mut root_nodes).await;
 
-    let mut fuse_daemon = do_mount(
+    let fuse_daemon = do_mount(
         blob_service,
         directory_service,
         root_nodes,
@@ -614,12 +615,12 @@ async fn xattr() {
         // There should be 1 key, XATTR_NAME_DIRECTORY_DIGEST.
         assert_eq!(1, xattr_names.len(), "there should be 1 xattr name");
         assert_eq!(
-            super::XATTR_NAME_DIRECTORY_DIGEST,
+            XATTR_NAME_DIRECTORY_DIGEST,
             xattr_names.first().unwrap().as_encoded_bytes()
         );
 
         // The value should equal the string-formatted b3 digest.
-        let val = xattr::get(&p, OsStr::from_bytes(super::XATTR_NAME_DIRECTORY_DIGEST))
+        let val = xattr::get(&p, OsStr::from_bytes(XATTR_NAME_DIRECTORY_DIGEST))
             .expect("must succeed")
             .expect("must be some");
         assert_eq!(
@@ -643,12 +644,12 @@ async fn xattr() {
         // There should be 1 key, XATTR_NAME_BLOB_DIGEST.
         assert_eq!(1, xattr_names.len(), "there should be 1 xattr name");
         assert_eq!(
-            super::XATTR_NAME_BLOB_DIGEST,
+            XATTR_NAME_BLOB_DIGEST,
             xattr_names.first().unwrap().as_encoded_bytes()
         );
 
         // The value should equal the string-formatted b3 digest.
-        let val = xattr::get(&p, OsStr::from_bytes(super::XATTR_NAME_BLOB_DIGEST))
+        let val = xattr::get(&p, OsStr::from_bytes(XATTR_NAME_BLOB_DIGEST))
             .expect("must succeed")
             .expect("must be some");
         assert_eq!(
@@ -679,7 +680,7 @@ async fn read_blob_inside_dir() {
 
     populate_directory_with_keep(&blob_service, &directory_service, &mut root_nodes).await;
 
-    let mut fuse_daemon = do_mount(
+    let fuse_daemon = do_mount(
         blob_service,
         directory_service,
         root_nodes,
@@ -719,7 +720,7 @@ async fn read_blob_deep_inside_dir() {
 
     populate_directory_complicated(&blob_service, &directory_service, &mut root_nodes).await;
 
-    let mut fuse_daemon = do_mount(
+    let fuse_daemon = do_mount(
         blob_service,
         directory_service,
         root_nodes,
@@ -762,7 +763,7 @@ async fn readdir() {
 
     populate_directory_complicated(&blob_service, &directory_service, &mut root_nodes).await;
 
-    let mut fuse_daemon = do_mount(
+    let fuse_daemon = do_mount(
         blob_service,
         directory_service,
         root_nodes,
@@ -822,7 +823,7 @@ async fn readdir_deep() {
 
     populate_directory_complicated(&blob_service, &directory_service, &mut root_nodes).await;
 
-    let mut fuse_daemon = do_mount(
+    let fuse_daemon = do_mount(
         blob_service,
         directory_service,
         root_nodes,
@@ -872,7 +873,7 @@ async fn check_attributes() {
     populate_symlink(&mut root_nodes).await;
     populate_blob_helloworld(&blob_service, &mut root_nodes).await;
 
-    let mut fuse_daemon = do_mount(
+    let fuse_daemon = do_mount(
         blob_service,
         directory_service,
         root_nodes,
@@ -947,7 +948,7 @@ async fn compare_inodes_directories() {
     populate_directory_with_keep(&blob_service, &directory_service, &mut root_nodes).await;
     populate_directory_complicated(&blob_service, &directory_service, &mut root_nodes).await;
 
-    let mut fuse_daemon = do_mount(
+    let fuse_daemon = do_mount(
         blob_service,
         directory_service,
         root_nodes,
@@ -991,7 +992,7 @@ async fn compare_inodes_files() {
 
     populate_directory_complicated(&blob_service, &directory_service, &mut root_nodes).await;
 
-    let mut fuse_daemon = do_mount(
+    let fuse_daemon = do_mount(
         blob_service,
         directory_service,
         root_nodes,
@@ -1040,7 +1041,7 @@ async fn compare_inodes_symlinks() {
     populate_directory_complicated(&blob_service, &directory_service, &mut root_nodes).await;
     populate_symlink2(&mut root_nodes).await;
 
-    let mut fuse_daemon = do_mount(
+    let fuse_daemon = do_mount(
         blob_service,
         directory_service,
         root_nodes,
@@ -1083,7 +1084,7 @@ async fn read_wrong_paths_in_root() {
 
     populate_blob_a(&blob_service, &mut root_nodes).await;
 
-    let mut fuse_daemon = do_mount(
+    let fuse_daemon = do_mount(
         blob_service,
         directory_service,
         root_nodes,
@@ -1138,7 +1139,7 @@ async fn disallow_writes() {
     let (blob_service, directory_service) = gen_svcs();
     let root_nodes = BTreeMap::default();
 
-    let mut fuse_daemon = do_mount(
+    let fuse_daemon = do_mount(
         blob_service,
         directory_service,
         root_nodes,
@@ -1170,7 +1171,7 @@ async fn missing_directory() {
 
     populate_directorynode_without_directory(&mut root_nodes).await;
 
-    let mut fuse_daemon = do_mount(
+    let fuse_daemon = do_mount(
         blob_service,
         directory_service,
         root_nodes,
@@ -1218,7 +1219,7 @@ async fn missing_blob() {
 
     populate_filenode_without_blob(&mut root_nodes).await;
 
-    let mut fuse_daemon = do_mount(
+    let fuse_daemon = do_mount(
         blob_service,
         directory_service,
         root_nodes,
diff --git a/tvix/castore/src/fs/mod.rs b/tvix/castore/src/fs/mod.rs
index 826523131f..b565ed60ac 100644
--- a/tvix/castore/src/fs/mod.rs
+++ b/tvix/castore/src/fs/mod.rs
@@ -9,9 +9,6 @@ pub mod fuse;
 #[cfg(feature = "virtiofs")]
 pub mod virtiofs;
 
-#[cfg(test)]
-mod tests;
-
 pub use self::root_nodes::RootNodes;
 use self::{
     file_attr::ROOT_FILE_ATTR,
@@ -46,7 +43,7 @@ use tokio::{
     io::{AsyncReadExt, AsyncSeekExt},
     sync::mpsc,
 };
-use tracing::{debug, error, instrument, warn, Span};
+use tracing::{debug, error, instrument, warn, Instrument as _, Span};
 
 /// This implements a read-only FUSE filesystem for a tvix-store
 /// with the passed [BlobService], [DirectoryService] and [RootNodes].
@@ -400,16 +397,20 @@ where
 
             // This task will run in the background immediately and will exit
             // after the stream ends or if we no longer want any more entries.
-            self.tokio_handle.spawn(async move {
-                let mut stream = root_nodes_provider.list().enumerate();
-                while let Some(node) = stream.next().await {
-                    if tx.send(node).await.is_err() {
-                        // If we get a send error, it means the sync code
-                        // doesn't want any more entries.
-                        break;
+            self.tokio_handle.spawn(
+                async move {
+                    let mut stream = root_nodes_provider.list().enumerate();
+                    while let Some(node) = stream.next().await {
+                        if tx.send(node).await.is_err() {
+                            // If we get a send error, it means the sync code
+                            // doesn't want any more entries.
+                            break;
+                        }
                     }
                 }
-            });
+                // instrument the task with the current span; this is not done by default.
+                .in_current_span(),
+            );
 
             // Put the rx part into [self.dir_handles].
             // TODO: this will overflow after 2**64 operations,
diff --git a/tvix/castore/src/import/archive.rs b/tvix/castore/src/import/archive.rs
index 0ebb4a2361..cd5b1290e0 100644
--- a/tvix/castore/src/import/archive.rs
+++ b/tvix/castore/src/import/archive.rs
@@ -1,38 +1,23 @@
 //! Imports from an archive (tarballs)
 
 use std::collections::HashMap;
-use std::io::{Cursor, Write};
-use std::sync::Arc;
 
 use petgraph::graph::{DiGraph, NodeIndex};
 use petgraph::visit::{DfsPostOrder, EdgeRef};
 use petgraph::Direction;
 use tokio::io::AsyncRead;
-use tokio::sync::Semaphore;
-use tokio::task::JoinSet;
 use tokio_stream::StreamExt;
 use tokio_tar::Archive;
-use tokio_util::io::InspectReader;
 use tracing::{instrument, warn, Level};
 
 use crate::blobservice::BlobService;
 use crate::directoryservice::DirectoryService;
 use crate::import::{ingest_entries, IngestionEntry, IngestionError};
 use crate::proto::node::Node;
-use crate::B3Digest;
 
-type TarPathBuf = std::path::PathBuf;
-
-/// Files smaller than this threshold, in bytes, are uploaded to the [BlobService] in the
-/// background.
-///
-/// This is a u32 since we acquire a weighted semaphore using the size of the blob.
-/// [Semaphore::acquire_many_owned] takes a u32, so we need to ensure the size of
-/// the blob can be represented using a u32 and will not cause an overflow.
-const CONCURRENT_BLOB_UPLOAD_THRESHOLD: u32 = 1024 * 1024;
+use super::blobs::{self, ConcurrentBlobUploader};
 
-/// The maximum amount of bytes allowed to be buffered in memory to perform async blob uploads.
-const MAX_TARBALL_BUFFER_SIZE: usize = 128 * 1024 * 1024;
+type TarPathBuf = std::path::PathBuf;
 
 #[derive(Debug, thiserror::Error)]
 pub enum Error {
@@ -57,13 +42,6 @@ pub enum Error {
     #[error("unable to read link name field for {0}: {1}")]
     LinkName(TarPathBuf, std::io::Error),
 
-    #[error("unable to read blob contents for {0}: {1}")]
-    BlobRead(TarPathBuf, std::io::Error),
-
-    // FUTUREWORK: proper error for blob finalize
-    #[error("unable to finalize blob {0}: {1}")]
-    BlobFinalize(TarPathBuf, std::io::Error),
-
     #[error("unsupported tar entry {0} type: {1:?}")]
     EntryType(TarPathBuf, tokio_tar::EntryType),
 
@@ -72,6 +50,9 @@ pub enum Error {
 
     #[error("unexpected number of top level directory entries")]
     UnexpectedNumberOfTopLevelEntries,
+
+    #[error(transparent)]
+    BlobUploadError(#[from] blobs::Error),
 }
 
 /// Ingests elements from the given tar [`Archive`] into the passed [`BlobService`] and
@@ -94,8 +75,7 @@ where
     // In the first phase, collect up all the regular files and symlinks.
     let mut nodes = IngestionEntryGraph::new();
 
-    let semaphore = Arc::new(Semaphore::new(MAX_TARBALL_BUFFER_SIZE));
-    let mut async_blob_uploads: JoinSet<Result<(), Error>> = JoinSet::new();
+    let mut blob_uploader = ConcurrentBlobUploader::new(blob_service);
 
     let mut entries_iter = archive.entries().map_err(Error::Entries)?;
     while let Some(mut entry) = entries_iter.try_next().await.map_err(Error::NextEntry)? {
@@ -110,77 +90,14 @@ where
             tokio_tar::EntryType::Regular
             | tokio_tar::EntryType::GNUSparse
             | tokio_tar::EntryType::Continuous => {
-                let header_size = header
+                let size = header
                     .size()
                     .map_err(|e| Error::Size(tar_path.clone(), e))?;
 
-                // If the blob is small enough, read it off the wire, compute the digest,
-                // and upload it to the [BlobService] in the background.
-                let (size, digest) = if header_size <= CONCURRENT_BLOB_UPLOAD_THRESHOLD as u64 {
-                    let mut buffer = Vec::with_capacity(header_size as usize);
-                    let mut hasher = blake3::Hasher::new();
-                    let mut reader = InspectReader::new(&mut entry, |bytes| {
-                        hasher.write_all(bytes).unwrap();
-                    });
-
-                    // Ensure that we don't buffer into memory until we've acquired a permit.
-                    // This prevents consuming too much memory when performing concurrent
-                    // blob uploads.
-                    let permit = semaphore
-                        .clone()
-                        // This cast is safe because ensure the header_size is less than
-                        // CONCURRENT_BLOB_UPLOAD_THRESHOLD which is a u32.
-                        .acquire_many_owned(header_size as u32)
-                        .await
-                        .unwrap();
-                    let size = tokio::io::copy(&mut reader, &mut buffer)
-                        .await
-                        .map_err(|e| Error::Size(tar_path.clone(), e))?;
-
-                    let digest: B3Digest = hasher.finalize().as_bytes().into();
-
-                    {
-                        let blob_service = blob_service.clone();
-                        let digest = digest.clone();
-                        async_blob_uploads.spawn({
-                            let tar_path = tar_path.clone();
-                            async move {
-                                let mut writer = blob_service.open_write().await;
-
-                                tokio::io::copy(&mut Cursor::new(buffer), &mut writer)
-                                    .await
-                                    .map_err(|e| Error::BlobRead(tar_path.clone(), e))?;
-
-                                let blob_digest = writer
-                                    .close()
-                                    .await
-                                    .map_err(|e| Error::BlobFinalize(tar_path, e))?;
-
-                                assert_eq!(digest, blob_digest, "Tvix bug: blob digest mismatch");
-
-                                // Make sure we hold the permit until we finish writing the blob
-                                // to the [BlobService].
-                                drop(permit);
-                                Ok(())
-                            }
-                        });
-                    }
-
-                    (size, digest)
-                } else {
-                    let mut writer = blob_service.open_write().await;
-
-                    let size = tokio::io::copy(&mut entry, &mut writer)
-                        .await
-                        .map_err(|e| Error::BlobRead(tar_path.clone(), e))?;
-
-                    let digest = writer
-                        .close()
-                        .await
-                        .map_err(|e| Error::BlobFinalize(tar_path.clone(), e))?;
-
-                    (size, digest)
-                };
+                let digest = blob_uploader
+                    .upload(&path, size, &mut entry)
+                    .await
+                    .map_err(Error::BlobUploadError)?;
 
                 let executable = entry
                     .header()
@@ -219,9 +136,7 @@ where
         nodes.add(entry)?;
     }
 
-    while let Some(result) = async_blob_uploads.join_next().await {
-        result.expect("task panicked")?;
-    }
+    blob_uploader.join().await.map_err(Error::BlobUploadError)?;
 
     let root_node = ingest_entries(
         directory_service,
diff --git a/tvix/castore/src/import/blobs.rs b/tvix/castore/src/import/blobs.rs
new file mode 100644
index 0000000000..8135d871d6
--- /dev/null
+++ b/tvix/castore/src/import/blobs.rs
@@ -0,0 +1,177 @@
+use std::{
+    io::{Cursor, Write},
+    sync::Arc,
+};
+
+use tokio::{
+    io::AsyncRead,
+    sync::Semaphore,
+    task::{JoinError, JoinSet},
+};
+use tokio_util::io::InspectReader;
+
+use crate::{blobservice::BlobService, B3Digest, Path, PathBuf};
+
+/// Files smaller than this threshold, in bytes, are uploaded to the [BlobService] in the
+/// background.
+///
+/// This is a u32 since we acquire a weighted semaphore using the size of the blob.
+/// [Semaphore::acquire_many_owned] takes a u32, so we need to ensure the size of
+/// the blob can be represented using a u32 and will not cause an overflow.
+const CONCURRENT_BLOB_UPLOAD_THRESHOLD: u32 = 1024 * 1024;
+
+/// The maximum amount of bytes allowed to be buffered in memory to perform async blob uploads.
+const MAX_BUFFER_SIZE: usize = 128 * 1024 * 1024;
+
+#[derive(Debug, thiserror::Error)]
+pub enum Error {
+    #[error("unable to read blob contents for {0}: {1}")]
+    BlobRead(PathBuf, std::io::Error),
+
+    // FUTUREWORK: proper error for blob finalize
+    #[error("unable to finalize blob {0}: {1}")]
+    BlobFinalize(PathBuf, std::io::Error),
+
+    #[error("unexpected size for {path} wanted: {wanted} got: {got}")]
+    UnexpectedSize {
+        path: PathBuf,
+        wanted: u64,
+        got: u64,
+    },
+
+    #[error("blob upload join error: {0}")]
+    JoinError(#[from] JoinError),
+}
+
+/// The concurrent blob uploader provides a mechanism for concurrently uploading small blobs.
+/// This is useful when ingesting from sources like tarballs and archives which each blob entry
+/// must be read sequentially. Ingesting many small blobs sequentially becomes slow due to
+/// round trip time with the blob service. The concurrent blob uploader will buffer small
+/// blobs in memory and upload them to the blob service in the background.
+///
+/// Once all blobs have been uploaded, make sure to call [ConcurrentBlobUploader::join] to wait
+/// for all background jobs to complete and check for any errors.
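+///
+/// A minimal usage sketch (`blob_service`, `path`, `size` and `reader` are
+/// assumptions, for illustration):
+///
+/// ```ignore
+/// let mut uploader = ConcurrentBlobUploader::new(blob_service);
+/// // upload returns the digest right away; small blobs finish in the background.
+/// let digest = uploader.upload(&path, size, &mut reader).await?;
+/// // join to wait for background uploads and surface any of their errors.
+/// uploader.join().await?;
+/// ```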
+pub struct ConcurrentBlobUploader<BS> {
+    blob_service: BS,
+    upload_tasks: JoinSet<Result<(), Error>>,
+    upload_semaphore: Arc<Semaphore>,
+}
+
+impl<BS> ConcurrentBlobUploader<BS>
+where
+    BS: BlobService + Clone + 'static,
+{
+    /// Creates a new concurrent blob uploader which uploads blobs to the provided
+    /// blob service.
+    pub fn new(blob_service: BS) -> Self {
+        Self {
+            blob_service,
+            upload_tasks: JoinSet::new(),
+            upload_semaphore: Arc::new(Semaphore::new(MAX_BUFFER_SIZE)),
+        }
+    }
+
+    /// Uploads a blob to the blob service. If the blob is small enough it will be read into a buffer
+    /// and uploaded in the background.
+    /// This will read the entirety of the provided reader unless an error occurs, even if blobs
+    /// are uploaded in the background.
+    pub async fn upload<R>(
+        &mut self,
+        path: &Path,
+        expected_size: u64,
+        mut r: R,
+    ) -> Result<B3Digest, Error>
+    where
+        R: AsyncRead + Unpin,
+    {
+        if expected_size < CONCURRENT_BLOB_UPLOAD_THRESHOLD as u64 {
+            let mut buffer = Vec::with_capacity(expected_size as usize);
+            let mut hasher = blake3::Hasher::new();
+            let mut reader = InspectReader::new(&mut r, |bytes| {
+                hasher.write_all(bytes).unwrap();
+            });
+
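+            // Acquiring `expected_size` permits caps the total number of bytes
+            // buffered across all in-flight background uploads at MAX_BUFFER_SIZE.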
+            let permit = self
+                .upload_semaphore
+                .clone()
+                // This cast is safe because we ensure expected_size is less than
+                // CONCURRENT_BLOB_UPLOAD_THRESHOLD, which is a u32.
+                .acquire_many_owned(expected_size as u32)
+                .await
+                .unwrap();
+            let size = tokio::io::copy(&mut reader, &mut buffer)
+                .await
+                .map_err(|e| Error::BlobRead(path.into(), e))?;
+            let digest: B3Digest = hasher.finalize().as_bytes().into();
+
+            if size != expected_size {
+                return Err(Error::UnexpectedSize {
+                    path: path.into(),
+                    wanted: expected_size,
+                    got: size,
+                });
+            }
+
+            self.upload_tasks.spawn({
+                let blob_service = self.blob_service.clone();
+                let expected_digest = digest.clone();
+                let path = path.to_owned();
+                let r = Cursor::new(buffer);
+                async move {
+                    let digest = upload_blob(&blob_service, &path, expected_size, r).await?;
+
+                    assert_eq!(digest, expected_digest, "Tvix bug: blob digest mismatch");
+
+                    // Make sure we hold the permit until we finish writing the blob
+                    // to the [BlobService].
+                    drop(permit);
+                    Ok(())
+                }
+            });
+
+            return Ok(digest);
+        }
+
+        upload_blob(&self.blob_service, path, expected_size, r).await
+    }
+
+    /// Waits for all background upload jobs to complete, returning any upload errors.
+    pub async fn join(mut self) -> Result<(), Error> {
+        while let Some(result) = self.upload_tasks.join_next().await {
+            result??;
+        }
+        Ok(())
+    }
+}
+
+async fn upload_blob<BS, R>(
+    blob_service: &BS,
+    path: &Path,
+    expected_size: u64,
+    mut r: R,
+) -> Result<B3Digest, Error>
+where
+    BS: BlobService,
+    R: AsyncRead + Unpin,
+{
+    let mut writer = blob_service.open_write().await;
+
+    let size = tokio::io::copy(&mut r, &mut writer)
+        .await
+        .map_err(|e| Error::BlobRead(path.into(), e))?;
+
+    let digest = writer
+        .close()
+        .await
+        .map_err(|e| Error::BlobFinalize(path.into(), e))?;
+
+    if size != expected_size {
+        return Err(Error::UnexpectedSize {
+            path: path.into(),
+            wanted: expected_size,
+            got: size,
+        });
+    }
+
+    Ok(digest)
+}
diff --git a/tvix/castore/src/import/fs.rs b/tvix/castore/src/import/fs.rs
index 9d3ecfe6ab..dc7821b810 100644
--- a/tvix/castore/src/import/fs.rs
+++ b/tvix/castore/src/import/fs.rs
@@ -6,7 +6,11 @@ use std::fs::FileType;
 use std::os::unix::ffi::OsStringExt;
 use std::os::unix::fs::MetadataExt;
 use std::os::unix::fs::PermissionsExt;
+use tokio::io::BufReader;
+use tokio_util::io::InspectReader;
 use tracing::instrument;
+use tracing::Span;
+use tracing_indicatif::span_ext::IndicatifSpanExt;
 use walkdir::DirEntry;
 use walkdir::WalkDir;
 
@@ -26,7 +30,7 @@ use super::IngestionError;
 ///
 /// This function will walk the filesystem using `walkdir` and will consume
 /// `O(#number of entries)` space.
-#[instrument(skip(blob_service, directory_service), fields(path), err)]
+#[instrument(skip(blob_service, directory_service), fields(path, indicatif.pb_show=1), err)]
 pub async fn ingest_path<BS, DS, P>(
     blob_service: BS,
     directory_service: DS,
@@ -37,6 +41,10 @@ where
     BS: BlobService + Clone,
     DS: DirectoryService,
 {
+    let span = Span::current();
+    span.pb_set_message(&format!("Ingesting {:?}", path));
+    span.pb_start();
+
     let iter = WalkDir::new(path.as_ref())
         .follow_links(false)
         .follow_root_links(false)
@@ -44,7 +52,18 @@ where
         .into_iter();
 
     let entries = dir_entries_to_ingestion_stream(blob_service, iter, path.as_ref());
-    ingest_entries(directory_service, entries).await
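+    // Wrap the entry stream so every successfully ingested entry advances the
+    // progress bar on the current span.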
+    ingest_entries(
+        directory_service,
+        entries.inspect({
+            let span = span.clone();
+            move |e| {
+                if e.is_ok() {
+                    span.pb_inc(1)
+                }
+            }
+        }),
+    )
+    .await
 }
 
 /// Converts an iterator of [walkdir::DirEntry]s into a stream of ingestion entries.
@@ -138,7 +157,7 @@ where
 }
 
 /// Uploads the file at the provided [Path] to the [BlobService].
-#[instrument(skip(blob_service), fields(path), err)]
+#[instrument(skip(blob_service), fields(path, indicatif.pb_show=1), err)]
 async fn upload_blob<BS>(
     blob_service: BS,
     path: impl AsRef<std::path::Path>,
@@ -146,16 +165,29 @@ async fn upload_blob<BS>(
 where
     BS: BlobService,
 {
-    let mut file = match tokio::fs::File::open(path.as_ref()).await {
-        Ok(file) => file,
-        Err(e) => return Err(Error::BlobRead(path.as_ref().to_path_buf(), e)),
-    };
+    let span = Span::current();
+    span.pb_set_style(&tvix_tracing::PB_TRANSFER_STYLE);
+    span.pb_set_message(&format!("Uploading blob for {:?}", path.as_ref()));
+    span.pb_start();
 
-    let mut writer = blob_service.open_write().await;
+    let file = tokio::fs::File::open(path.as_ref())
+        .await
+        .map_err(|e| Error::BlobRead(path.as_ref().to_path_buf(), e))?;
 
-    if let Err(e) = tokio::io::copy(&mut file, &mut writer).await {
-        return Err(Error::BlobRead(path.as_ref().to_path_buf(), e));
-    };
+    let metadata = file
+        .metadata()
+        .await
+        .map_err(|e| Error::Stat(path.as_ref().to_path_buf(), e))?;
+
+    span.pb_set_length(metadata.len());
+    let reader = InspectReader::new(file, |d| {
+        span.pb_inc(d.len() as u64);
+    });
+
+    let mut writer = blob_service.open_write().await;
+    tokio::io::copy(&mut BufReader::new(reader), &mut writer)
+        .await
+        .map_err(|e| Error::BlobRead(path.as_ref().to_path_buf(), e))?;
 
     let digest = writer
         .close()
diff --git a/tvix/castore/src/import/mod.rs b/tvix/castore/src/import/mod.rs
index e8b27e469c..a9ac0be6b0 100644
--- a/tvix/castore/src/import/mod.rs
+++ b/tvix/castore/src/import/mod.rs
@@ -14,7 +14,6 @@ use crate::proto::FileNode;
 use crate::proto::SymlinkNode;
 use crate::B3Digest;
 use futures::{Stream, StreamExt};
-
 use tracing::Level;
 
 use std::collections::HashMap;
@@ -24,6 +23,7 @@ mod error;
 pub use error::IngestionError;
 
 pub mod archive;
+pub mod blobs;
 pub mod fs;
 
 /// Ingests [IngestionEntry] from the given stream into the passed [DirectoryService].
diff --git a/tvix/castore/src/proto/grpc_directoryservice_wrapper.rs b/tvix/castore/src/proto/grpc_directoryservice_wrapper.rs
index 7d741a3f07..ce1d2bcd24 100644
--- a/tvix/castore/src/proto/grpc_directoryservice_wrapper.rs
+++ b/tvix/castore/src/proto/grpc_directoryservice_wrapper.rs
@@ -1,12 +1,13 @@
-use crate::directoryservice::ClosureValidator;
+use crate::directoryservice::DirectoryGraph;
+use crate::directoryservice::LeavesToRootValidator;
 use crate::proto;
 use crate::{directoryservice::DirectoryService, B3Digest};
-use futures::StreamExt;
+use futures::stream::BoxStream;
+use futures::TryStreamExt;
 use std::ops::Deref;
-use tokio::sync::mpsc::channel;
-use tokio_stream::wrappers::ReceiverStream;
+use tokio_stream::once;
 use tonic::{async_trait, Request, Response, Status, Streaming};
-use tracing::{debug, instrument, warn};
+use tracing::{instrument, warn};
 
 pub struct GRPCDirectoryServiceWrapper<T> {
     directory_service: T,
@@ -23,63 +24,52 @@ impl<T> proto::directory_service_server::DirectoryService for GRPCDirectoryServi
 where
     T: Deref<Target = dyn DirectoryService> + Send + Sync + 'static,
 {
-    type GetStream = ReceiverStream<tonic::Result<proto::Directory, Status>>;
+    type GetStream = BoxStream<'static, tonic::Result<proto::Directory, Status>>;
 
     #[instrument(skip_all)]
-    async fn get(
-        &self,
+    async fn get<'a>(
+        &'a self,
         request: Request<proto::GetDirectoryRequest>,
     ) -> Result<Response<Self::GetStream>, Status> {
-        let (tx, rx) = channel(5);
-
         let req_inner = request.into_inner();
 
-        // look at the digest in the request and put it in the top of the queue.
-        match &req_inner.by_what {
-            None => return Err(Status::invalid_argument("by_what needs to be specified")),
-            Some(proto::get_directory_request::ByWhat::Digest(ref digest)) => {
+        let by_what = &req_inner
+            .by_what
+            .ok_or_else(|| Status::invalid_argument("invalid by_what"))?;
+
+        match by_what {
+            proto::get_directory_request::ByWhat::Digest(ref digest) => {
                 let digest: B3Digest = digest
                     .clone()
                     .try_into()
                     .map_err(|_e| Status::invalid_argument("invalid digest length"))?;
 
-                if !req_inner.recursive {
-                    let e: Result<proto::Directory, Status> = match self
-                        .directory_service
-                        .get(&digest)
-                        .await
-                    {
-                        Ok(Some(directory)) => Ok(directory),
-                        Ok(None) => {
-                            Err(Status::not_found(format!("directory {} not found", digest)))
-                        }
-                        Err(e) => {
-                            warn!(err = %e, directory.digest=%digest, "failed to get directory");
-                            Err(e.into())
-                        }
-                    };
-
-                    if tx.send(e).await.is_err() {
-                        debug!("receiver dropped");
+                Ok(tonic::Response::new({
+                    if !req_inner.recursive {
+                        let directory = self
+                            .directory_service
+                            .get(&digest)
+                            .await
+                            .map_err(|e| {
+                                warn!(err = %e, directory.digest=%digest, "failed to get directory");
+                                tonic::Status::new(tonic::Code::Internal, e.to_string())
+                            })?
+                            .ok_or_else(|| {
+                                Status::not_found(format!("directory {} not found", digest))
+                            })?;
+
+                        Box::pin(once(Ok(directory)))
+                    } else {
+                        // If recursive was requested, traverse via get_recursive.
+                        Box::pin(
+                            self.directory_service.get_recursive(&digest).map_err(|e| {
+                                tonic::Status::new(tonic::Code::Internal, e.to_string())
+                            }),
+                        )
                     }
-                } else {
-                    // If recursive was requested, traverse via get_recursive.
-                    let mut directories_it = self.directory_service.get_recursive(&digest);
-
-                    while let Some(e) = directories_it.next().await {
-                        // map err in res from Error to Status
-                        let res = e.map_err(|e| Status::internal(e.to_string()));
-                        if tx.send(res).await.is_err() {
-                            debug!("receiver dropped");
-                            break;
-                        }
-                    }
-                }
+                }))
             }
         }
-
-        let receiver_stream = ReceiverStream::new(rx);
-        Ok(Response::new(receiver_stream))
     }
 
     #[instrument(skip_all)]
@@ -89,14 +79,20 @@ where
     ) -> Result<Response<proto::PutDirectoryResponse>, Status> {
         let mut req_inner = request.into_inner();
 
-        // We put all Directory messages we receive into ClosureValidator first.
-        let mut validator = ClosureValidator::default();
+        // We put all Directory messages we receive into DirectoryGraph.
+        let mut validator = DirectoryGraph::<LeavesToRootValidator>::default();
         while let Some(directory) = req_inner.message().await? {
-            validator.add(directory)?;
+            validator
+                .add(directory)
+                .map_err(|e| tonic::Status::new(tonic::Code::Internal, e.to_string()))?;
         }
 
         // drain, which validates connectivity too.
-        let directories = validator.finalize()?;
+        let directories = validator
+            .validate()
+            .map_err(|e| tonic::Status::new(tonic::Code::Internal, e.to_string()))?
+            .drain_leaves_to_root()
+            .collect::<Vec<_>>();
 
         let mut directory_putter = self.directory_service.put_multiple_start();
         for directory in directories {
diff --git a/tvix/cli/Cargo.toml b/tvix/cli/Cargo.toml
index ce8d361771..644393a7c4 100644
--- a/tvix/cli/Cargo.toml
+++ b/tvix/cli/Cargo.toml
@@ -14,14 +14,23 @@ tvix-castore = { path = "../castore" }
 tvix-store = { path = "../store", default-features = false, features = []}
 tvix-eval = { path = "../eval" }
 tvix-glue = { path = "../glue" }
+tvix-tracing = { path = "../tracing" }
 bytes = "1.4.0"
 clap = { version = "4.0", features = ["derive", "env"] }
 dirs = "4.0.0"
 rustyline = "10.0.0"
+rnix = "0.11.0"
 thiserror = "1.0.38"
 tokio = "1.28.0"
-tracing = { version = "0.1.37", features = ["max_level_trace", "release_max_level_info"] }
-tracing-subscriber = { version = "0.3.16", features = ["json"] }
+tracing = "0.1.40"
+tracing-indicatif = "0.3.6"
 
 [dependencies.wu-manber]
 git = "https://github.com/tvlfyi/wu-manber.git"
+
+[target.'cfg(not(target_env = "msvc"))'.dependencies]
+tikv-jemallocator = "0.5"
+
+[features]
+default = []
+tracy = ["tvix-tracing/tracy"]
diff --git a/tvix/cli/src/main.rs b/tvix/cli/src/main.rs
index d66d2ce4cb..686513b77c 100644
--- a/tvix/cli/src/main.rs
+++ b/tvix/cli/src/main.rs
@@ -1,25 +1,36 @@
+mod repl;
+
 use clap::Parser;
-use rustyline::{error::ReadlineError, Editor};
+use repl::Repl;
 use std::rc::Rc;
 use std::{fs, path::PathBuf};
-use tracing::Level;
-use tracing_subscriber::fmt::writer::MakeWriterExt;
-use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt};
-use tracing_subscriber::{EnvFilter, Layer};
+use tracing::{instrument, Level, Span};
+use tracing_indicatif::span_ext::IndicatifSpanExt;
 use tvix_build::buildservice;
 use tvix_eval::builtins::impure_builtins;
 use tvix_eval::observer::{DisassemblingObserver, TracingObserver};
-use tvix_eval::{EvalIO, Value};
+use tvix_eval::{ErrorKind, EvalIO, Value};
 use tvix_glue::builtins::add_fetcher_builtins;
 use tvix_glue::builtins::add_import_builtins;
 use tvix_glue::tvix_io::TvixIO;
 use tvix_glue::tvix_store_io::TvixStoreIO;
 use tvix_glue::{builtins::add_derivation_builtins, configure_nix_path};
 
-#[derive(Parser)]
+#[cfg(not(target_env = "msvc"))]
+use tikv_jemallocator::Jemalloc;
+
+#[cfg(not(target_env = "msvc"))]
+#[global_allocator]
+static GLOBAL: Jemalloc = Jemalloc;
+
+#[derive(Parser, Clone)]
 struct Args {
-    #[arg(long)]
-    log_level: Option<Level>,
+    /// A global log level to use when printing logs.
+    /// It's also possible to set `RUST_LOG` according to
+    /// `tracing_subscriber::filter::EnvFilter`, which will always have
+    /// priority.
+    #[arg(long, default_value_t=Level::INFO)]
+    log_level: Level,
 
     /// Path to a script to evaluate
     script: Option<PathBuf>,
@@ -123,16 +134,39 @@ fn init_io_handle(tokio_runtime: &tokio::runtime::Runtime, args: &Args) -> Rc<Tv
     ))
 }
 
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
+enum AllowIncomplete {
+    Allow,
+    #[default]
+    RequireComplete,
+}
+
+impl AllowIncomplete {
+    fn allow(&self) -> bool {
+        matches!(self, Self::Allow)
+    }
+}
+
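+/// Marker error returned by `interpret` when the input is syntactically
+/// incomplete (unexpected EOF while parsing) and [AllowIncomplete::Allow]
+/// was passed.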
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+struct IncompleteInput;
+
 /// Interprets the given code snippet, printing out warnings, errors
 /// and the result itself. The return value indicates whether
 /// evaluation succeeded.
+#[instrument(skip_all, fields(indicatif.pb_show=1))]
 fn interpret(
     tvix_store_io: Rc<TvixStoreIO>,
     code: &str,
     path: Option<PathBuf>,
     args: &Args,
     explain: bool,
-) -> bool {
+    allow_incomplete: AllowIncomplete,
+) -> Result<bool, IncompleteInput> {
+    let span = Span::current();
+    span.pb_start();
+    span.pb_set_style(&tvix_tracing::PB_SPINNER_STYLE);
+    span.pb_set_message("Setting up evaluator…");
+
     let mut eval = tvix_eval::Evaluation::new(
         Box::new(TvixIO::new(tvix_store_io.clone() as Rc<dyn EvalIO>)) as Box<dyn EvalIO>,
         true,
@@ -160,9 +194,22 @@ fn interpret(
             eval.runtime_observer = Some(&mut runtime_observer);
         }
 
+        span.pb_set_message("Evaluating…");
         eval.evaluate(code, path)
     };
 
+    if allow_incomplete.allow()
+        && result.errors.iter().any(|err| {
+            matches!(
+                &err.kind,
+                ErrorKind::ParseErrors(pes)
+                    if pes.iter().any(|pe| matches!(pe, rnix::parser::ParseError::UnexpectedEOF))
+            )
+        })
+    {
+        return Err(IncompleteInput);
+    }
+
     if args.display_ast {
         if let Some(ref expr) = result.expr {
             eprintln!("AST: {}", tvix_eval::pretty_print_expr(expr));
@@ -188,7 +235,7 @@ fn interpret(
     }
 
     // inform the caller about any errors
-    result.errors.is_empty()
+    Ok(result.errors.is_empty())
 }
 
 /// Interpret the given code snippet, but only run the Tvix compiler
@@ -232,24 +279,11 @@ fn lint(code: &str, path: Option<PathBuf>, args: &Args) -> bool {
 fn main() {
     let args = Args::parse();
 
-    // configure log settings
-    let level = args.log_level.unwrap_or(Level::INFO);
-
-    let subscriber = tracing_subscriber::registry().with(
-        tracing_subscriber::fmt::Layer::new()
-            .with_writer(std::io::stderr.with_max_level(level))
-            .compact()
-            .with_filter(
-                EnvFilter::builder()
-                    .with_default_directive(level.into())
-                    .from_env()
-                    .expect("invalid RUST_LOG"),
-            ),
-    );
-    subscriber
-        .try_init()
+    let _ = tvix_tracing::TracingBuilder::default()
+        .level(args.log_level)
+        .enable_progressbar()
+        .build()
         .expect("unable to set up tracing subscriber");
-
     let tokio_runtime = tokio::runtime::Runtime::new().expect("failed to setup tokio runtime");
 
     let io_handle = init_io_handle(&tokio_runtime, &args);
@@ -257,11 +291,21 @@ fn main() {
     if let Some(file) = &args.script {
         run_file(io_handle, file.clone(), &args)
     } else if let Some(expr) = &args.expr {
-        if !interpret(io_handle, expr, None, &args, false) {
+        if !interpret(
+            io_handle,
+            expr,
+            None,
+            &args,
+            false,
+            AllowIncomplete::RequireComplete,
+        )
+        .unwrap()
+        {
             std::process::exit(1);
         }
     } else {
-        run_prompt(io_handle, &args)
+        let mut repl = Repl::new();
+        repl.run(io_handle, &args)
     }
 }
 
@@ -274,7 +318,15 @@ fn run_file(io_handle: Rc<TvixStoreIO>, mut path: PathBuf, args: &Args) {
     let success = if args.compile_only {
         lint(&contents, Some(path), args)
     } else {
-        interpret(io_handle, &contents, Some(path), args, false)
+        interpret(
+            io_handle,
+            &contents,
+            Some(path),
+            args,
+            false,
+            AllowIncomplete::RequireComplete,
+        )
+        .unwrap()
     };
 
     if !success {
@@ -289,61 +341,3 @@ fn println_result(result: &Value, raw: bool) {
         println!("=> {} :: {}", result, result.type_of())
     }
 }
-
-fn state_dir() -> Option<PathBuf> {
-    let mut path = dirs::data_dir();
-    if let Some(p) = path.as_mut() {
-        p.push("tvix")
-    }
-    path
-}
-
-fn run_prompt(io_handle: Rc<TvixStoreIO>, args: &Args) {
-    let mut rl = Editor::<()>::new().expect("should be able to launch rustyline");
-
-    if args.compile_only {
-        eprintln!("warning: `--compile-only` has no effect on REPL usage!");
-    }
-
-    let history_path = match state_dir() {
-        // Attempt to set up these paths, but do not hard fail if it
-        // doesn't work.
-        Some(mut path) => {
-            let _ = std::fs::create_dir_all(&path);
-            path.push("history.txt");
-            let _ = rl.load_history(&path);
-            Some(path)
-        }
-
-        None => None,
-    };
-
-    loop {
-        let readline = rl.readline("tvix-repl> ");
-        match readline {
-            Ok(line) => {
-                if line.is_empty() {
-                    continue;
-                }
-
-                rl.add_history_entry(&line);
-
-                if let Some(without_prefix) = line.strip_prefix(":d ") {
-                    interpret(Rc::clone(&io_handle), without_prefix, None, args, true);
-                } else {
-                    interpret(Rc::clone(&io_handle), &line, None, args, false);
-                }
-            }
-            Err(ReadlineError::Interrupted) | Err(ReadlineError::Eof) => break,
-
-            Err(err) => {
-                eprintln!("error: {}", err);
-                break;
-            }
-        }
-    }
-
-    if let Some(path) = history_path {
-        rl.save_history(&path).unwrap();
-    }
-}
diff --git a/tvix/cli/src/repl.rs b/tvix/cli/src/repl.rs
new file mode 100644
index 0000000000..5a4830a027
--- /dev/null
+++ b/tvix/cli/src/repl.rs
@@ -0,0 +1,175 @@
+use std::path::PathBuf;
+use std::rc::Rc;
+
+use rustyline::{error::ReadlineError, Editor};
+use tvix_glue::tvix_store_io::TvixStoreIO;
+
+use crate::{interpret, AllowIncomplete, Args, IncompleteInput};
+
+fn state_dir() -> Option<PathBuf> {
+    let mut path = dirs::data_dir();
+    if let Some(p) = path.as_mut() {
+        p.push("tvix")
+    }
+    path
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum ReplCommand<'a> {
+    Expr(&'a str),
+    Explain(&'a str),
+    Print(&'a str),
+    Quit,
+    Help,
+}
+
+impl<'a> ReplCommand<'a> {
+    const HELP: &'static str = "
+Welcome to the Tvix REPL!
+
+The following commands are supported:
+
+  <expr>    Evaluate a Nix language expression and print the result, along with its inferred type
+  :d <expr> Evaluate a Nix language expression and print a detailed description of the result
+  :p <expr> Evaluate a Nix language expression and print the result recursively
+  :q        Exit the REPL
+  :?, :h    Display this help text
+";
+
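+    /// Parses a single line of REPL input into a [ReplCommand].
+    ///
+    /// For example (illustrative):
+    ///
+    /// ```ignore
+    /// assert_eq!(ReplCommand::parse(":q"), ReplCommand::Quit);
+    /// assert_eq!(ReplCommand::parse(":d 1 + 2"), ReplCommand::Explain("1 + 2"));
+    /// assert_eq!(ReplCommand::parse("1 + 2"), ReplCommand::Expr("1 + 2"));
+    /// ```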
+    pub fn parse(input: &'a str) -> Self {
+        if input.starts_with(':') {
+            if let Some(without_prefix) = input.strip_prefix(":d ") {
+                return Self::Explain(without_prefix);
+            } else if let Some(without_prefix) = input.strip_prefix(":p ") {
+                return Self::Print(without_prefix);
+            }
+
+            let input = input.trim_end();
+            match input {
+                ":q" => return Self::Quit,
+                ":h" | ":?" => return Self::Help,
+                _ => {}
+            }
+        }
+
+        Self::Expr(input)
+    }
+}
+
+#[derive(Debug)]
+pub struct Repl {
+    /// In-progress multiline input, when the input so far doesn't parse as a complete expression
+    multiline_input: Option<String>,
+    rl: Editor<()>,
+}
+
+impl Repl {
+    pub fn new() -> Self {
+        let rl = Editor::<()>::new().expect("should be able to launch rustyline");
+        Self {
+            multiline_input: None,
+            rl,
+        }
+    }
+
+    pub fn run(&mut self, io_handle: Rc<TvixStoreIO>, args: &Args) {
+        if args.compile_only {
+            eprintln!("warning: `--compile-only` has no effect on REPL usage!");
+        }
+
+        let history_path = match state_dir() {
+            // Attempt to set up these paths, but do not hard fail if it
+            // doesn't work.
+            Some(mut path) => {
+                let _ = std::fs::create_dir_all(&path);
+                path.push("history.txt");
+                let _ = self.rl.load_history(&path);
+                Some(path)
+            }
+
+            None => None,
+        };
+
+        loop {
+            let prompt = if self.multiline_input.is_some() {
+                "         > "
+            } else {
+                "tvix-repl> "
+            };
+
+            let readline = self.rl.readline(prompt);
+            match readline {
+                Ok(line) => {
+                    if line.is_empty() {
+                        continue;
+                    }
+
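+                    // If we're continuing a multiline expression, append this
+                    // line to the buffered input; otherwise evaluate it on its own.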
+                    let input = if let Some(mi) = &mut self.multiline_input {
+                        mi.push('\n');
+                        mi.push_str(&line);
+                        mi
+                    } else {
+                        &line
+                    };
+
+                    let res = match ReplCommand::parse(input) {
+                        ReplCommand::Quit => break,
+                        ReplCommand::Help => {
+                            println!("{}", ReplCommand::HELP);
+                            Ok(false)
+                        }
+                        ReplCommand::Expr(input) => interpret(
+                            Rc::clone(&io_handle),
+                            input,
+                            None,
+                            args,
+                            false,
+                            AllowIncomplete::Allow,
+                        ),
+                        ReplCommand::Explain(input) => interpret(
+                            Rc::clone(&io_handle),
+                            input,
+                            None,
+                            args,
+                            true,
+                            AllowIncomplete::Allow,
+                        ),
+                        ReplCommand::Print(input) => interpret(
+                            Rc::clone(&io_handle),
+                            input,
+                            None,
+                            &Args {
+                                strict: true,
+                                ..(args.clone())
+                            },
+                            false,
+                            AllowIncomplete::Allow,
+                        ),
+                    };
+
+                    match res {
+                        Ok(_) => {
+                            self.rl.add_history_entry(input);
+                            self.multiline_input = None;
+                        }
+                        Err(IncompleteInput) => {
+                            if self.multiline_input.is_none() {
+                                self.multiline_input = Some(line);
+                            }
+                        }
+                    }
+                }
+                Err(ReadlineError::Interrupted) | Err(ReadlineError::Eof) => break,
+
+                Err(err) => {
+                    eprintln!("error: {}", err);
+                    break;
+                }
+            }
+        }
+
+        if let Some(path) = history_path {
+            self.rl.save_history(&path).unwrap();
+        }
+    }
+}
diff --git a/tvix/default.nix b/tvix/default.nix
index f562cf37de..efbaf54f46 100644
--- a/tvix/default.nix
+++ b/tvix/default.nix
@@ -4,7 +4,6 @@
 let
   # crate override for crates that need protobuf
   protobufDep = prev: (prev.nativeBuildInputs or [ ]) ++ [ pkgs.buildPackages.protobuf ];
-  iconvDarwinDep = lib.optional pkgs.stdenv.isDarwin pkgs.libiconv;
 
   # On Darwin, some crates producing binaries need to be able to link against security.
   darwinDeps = lib.optionals pkgs.stdenv.isDarwin (with pkgs.buildPackages.darwin.apple_sdk.frameworks; [
@@ -13,28 +12,8 @@ let
   ]);
 
   # Load the crate2nix crate tree.
-  crates = import ./Cargo.nix {
-    inherit pkgs;
-    nixpkgs = pkgs.path;
-
-    # Hack to fix Darwin build
-    # See https://github.com/NixOS/nixpkgs/issues/218712
-    buildRustCrateForPkgs = pkgs:
-      if pkgs.stdenv.isDarwin then
-        let
-          buildRustCrate = pkgs.buildRustCrate;
-          buildRustCrate_ = args: buildRustCrate args // { dontStrip = true; };
-          override = o: args: buildRustCrate.override o (args // { dontStrip = true; });
-        in
-        pkgs.makeOverridable override { }
-      else pkgs.buildRustCrate;
-
+  crates = pkgs.callPackage ./Cargo.nix {
     defaultCrateOverrides = pkgs.defaultCrateOverrides // {
-      zstd-sys = prev: {
-        nativeBuildInputs = prev.nativeBuildInputs or [ ];
-        buildInputs = prev.buildInputs or [ ] ++ iconvDarwinDep;
-      };
-
       opentelemetry-proto = prev: {
         nativeBuildInputs = protobufDep prev;
       };
@@ -52,21 +31,34 @@ let
       };
 
       tvix-build = prev: {
+        src = depot.tvix.utils.filterRustCrateSrc rec {
+          root = prev.src.origSrc;
+          extraFileset = (lib.fileset.fileFilter (f: f.hasExt "proto") root);
+        };
         PROTO_ROOT = depot.tvix.build.protos.protos;
         nativeBuildInputs = protobufDep prev;
         buildInputs = darwinDeps;
       };
 
       tvix-castore = prev: {
+        src = depot.tvix.utils.filterRustCrateSrc rec {
+          root = prev.src.origSrc;
+          extraFileset = (lib.fileset.fileFilter (f: f.hasExt "proto") root);
+        };
         PROTO_ROOT = depot.tvix.castore.protos.protos;
         nativeBuildInputs = protobufDep prev;
       };
 
       tvix-cli = prev: {
+        src = depot.tvix.utils.filterRustCrateSrc { root = prev.src.origSrc; };
         buildInputs = prev.buildInputs or [ ] ++ darwinDeps;
       };
 
       tvix-store = prev: {
+        src = depot.tvix.utils.filterRustCrateSrc rec {
+          root = prev.src.origSrc;
+          extraFileset = (lib.fileset.fileFilter (f: f.hasExt "proto") root);
+        };
         PROTO_ROOT = depot.tvix.store.protos.protos;
         nativeBuildInputs = protobufDep prev;
         # fuse-backend-rs uses DiskArbitration framework to handle mount/unmount on Darwin
@@ -74,6 +66,38 @@ let
           ++ darwinDeps
           ++ lib.optional pkgs.stdenv.isDarwin pkgs.buildPackages.darwin.apple_sdk.frameworks.DiskArbitration;
       };
+
+      tvix-eval-builtin-macros = prev: {
+        src = depot.tvix.utils.filterRustCrateSrc { root = prev.src.origSrc; };
+      };
+
+      tvix-eval = prev: {
+        src = depot.tvix.utils.filterRustCrateSrc rec {
+          root = prev.src.origSrc;
+          extraFileset = (root + "/proptest-regressions");
+        };
+      };
+
+      tvix-glue = prev: {
+        src = depot.tvix.utils.filterRustCrateSrc {
+          root = prev.src.origSrc;
+        };
+      };
+
+      tvix-serde = prev: {
+        src = depot.tvix.utils.filterRustCrateSrc { root = prev.src.origSrc; };
+      };
+
+      tvix-tracing = prev: {
+        src = depot.tvix.utils.filterRustCrateSrc { root = prev.src.origSrc; };
+      };
+
+      nix-compat = prev: {
+        src = depot.tvix.utils.filterRustCrateSrc rec {
+          root = prev.src.origSrc;
+          extraFileset = (root + "/testdata");
+        };
+      };
     };
   };
 
@@ -125,13 +149,13 @@ in
     pkgs.stdenv.mkDerivation {
       inherit src;
 
-      # Important: we include the hash of the Cargo.lock file and
-      # Cargo.nix file in the derivation name.  This forces the FOD
-      # to be rebuilt/reverified whenever either of them changes.
-      name = "tvix-crate2nix-check-" +
-        (builtins.substring 0 8 (builtins.hashFile "sha256" ./Cargo.lock)) +
-        "-" +
-        (builtins.substring 0 8 (builtins.hashFile "sha256" ./Cargo.nix));
+      # Important: we include the hash of all Cargo related files in the derivation name.
+      # This forces the FOD to be rebuilt/re-verified whenever one of them changes.
+      name = "tvix-crate2nix-check-" + builtins.substring 0 8 (builtins.hashString "sha256"
+        (lib.concatMapStrings (f: builtins.hashFile "sha256" f)
+          ([ ./Cargo.toml ./Cargo.lock ] ++ (map (m: ./. + "/${m}/Cargo.toml") (lib.importTOML ./Cargo.toml).workspace.members))
+        )
+      );
 
       nativeBuildInputs = with pkgs; [ git cacert cargo ];
       buildPhase = ''
@@ -197,7 +221,7 @@ in
 
     buildInputs = [
       pkgs.fuse
-    ] ++ iconvDarwinDep;
+    ] ++ lib.optional pkgs.stdenv.isDarwin pkgs.libiconv;
 
     buildPhase = ''
       cargo doc --document-private-items
@@ -224,9 +248,7 @@ in
       rustPlatform.cargoSetupHook
     ];
 
-    # Allow blocks_in_conditions due to false positives with #[tracing::instrument(…)]:
-    # https://github.com/rust-lang/rust-clippy/issues/12281
-    buildPhase = "cargo clippy --tests --all-features --benches --examples -- -Dwarnings -A clippy::blocks_in_conditions | tee $out";
+    buildPhase = "cargo clippy --tests --all-features --benches --examples -- -Dwarnings | tee $out";
   };
 
   meta.ci.targets = [
@@ -235,4 +257,6 @@ in
     "shell"
     "rust-docs"
   ];
+
+  utils = import ./utils.nix { inherit lib depot; };
 }
diff --git a/tvix/docs/book.toml b/tvix/docs/book.toml
index 7318a90233..a5dacc46ed 100644
--- a/tvix/docs/book.toml
+++ b/tvix/docs/book.toml
@@ -9,3 +9,17 @@ title = "Tvix Docs"
 # override the /usr/bin/plantuml default
 plantuml-cmd = "plantuml"
 use-data-uris = true
+
+[preprocessor.admonish]
+command = "mdbook-admonish"
+after = ["links"] # ensure `{{#include}}` also gets processed
+assets_version = "3.0.2" # do not edit: managed by `mdbook-admonish install`
+
+[preprocessor.d2]
+command = "d2"
+after = ["links"] # ensure `{{#include}}` also gets processed
+
+[output]
+
+[output.html]
+additional-css = ["./mdbook-admonish.css"]
diff --git a/tvix/docs/default.nix b/tvix/docs/default.nix
index 9fc2f76576..3b102e4b7c 100644
--- a/tvix/docs/default.nix
+++ b/tvix/docs/default.nix
@@ -9,7 +9,10 @@ pkgs.stdenv.mkDerivation {
   src = lib.cleanSource ./.;
 
   nativeBuildInputs = [
+    pkgs.d2
     pkgs.mdbook
+    pkgs.mdbook-admonish
+    pkgs.mdbook-d2
     pkgs.mdbook-plantuml
     pkgs.plantuml
   ];
diff --git a/tvix/docs/mdbook-admonish.css b/tvix/docs/mdbook-admonish.css
new file mode 100644
index 0000000000..45aeff0511
--- /dev/null
+++ b/tvix/docs/mdbook-admonish.css
@@ -0,0 +1,348 @@
+@charset "UTF-8";
+:is(.admonition) {
+  display: flow-root;
+  margin: 1.5625em 0;
+  padding: 0 1.2rem;
+  color: var(--fg);
+  page-break-inside: avoid;
+  background-color: var(--bg);
+  border: 0 solid black;
+  border-inline-start-width: 0.4rem;
+  border-radius: 0.2rem;
+  box-shadow: 0 0.2rem 1rem rgba(0, 0, 0, 0.05), 0 0 0.1rem rgba(0, 0, 0, 0.1);
+}
+@media print {
+  :is(.admonition) {
+    box-shadow: none;
+  }
+}
+:is(.admonition) > * {
+  box-sizing: border-box;
+}
+:is(.admonition) :is(.admonition) {
+  margin-top: 1em;
+  margin-bottom: 1em;
+}
+:is(.admonition) > .tabbed-set:only-child {
+  margin-top: 0;
+}
+html :is(.admonition) > :last-child {
+  margin-bottom: 1.2rem;
+}
+
+a.admonition-anchor-link {
+  display: none;
+  position: absolute;
+  left: -1.2rem;
+  padding-right: 1rem;
+}
+a.admonition-anchor-link:link, a.admonition-anchor-link:visited {
+  color: var(--fg);
+}
+a.admonition-anchor-link:link:hover, a.admonition-anchor-link:visited:hover {
+  text-decoration: none;
+}
+a.admonition-anchor-link::before {
+  content: "§";
+}
+
+:is(.admonition-title, summary.admonition-title) {
+  position: relative;
+  min-height: 4rem;
+  margin-block: 0;
+  margin-inline: -1.6rem -1.2rem;
+  padding-block: 0.8rem;
+  padding-inline: 4.4rem 1.2rem;
+  font-weight: 700;
+  background-color: rgba(68, 138, 255, 0.1);
+  print-color-adjust: exact;
+  -webkit-print-color-adjust: exact;
+  display: flex;
+}
+:is(.admonition-title, summary.admonition-title) p {
+  margin: 0;
+}
+html :is(.admonition-title, summary.admonition-title):last-child {
+  margin-bottom: 0;
+}
+:is(.admonition-title, summary.admonition-title)::before {
+  position: absolute;
+  top: 0.625em;
+  inset-inline-start: 1.6rem;
+  width: 2rem;
+  height: 2rem;
+  background-color: #448aff;
+  print-color-adjust: exact;
+  -webkit-print-color-adjust: exact;
+  mask-image: url('data:image/svg+xml;charset=utf-8,<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24"></svg>');
+  -webkit-mask-image: url('data:image/svg+xml;charset=utf-8,<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24"></svg>');
+  mask-repeat: no-repeat;
+  -webkit-mask-repeat: no-repeat;
+  mask-size: contain;
+  -webkit-mask-size: contain;
+  content: "";
+}
+:is(.admonition-title, summary.admonition-title):hover a.admonition-anchor-link {
+  display: initial;
+}
+
+details.admonition > summary.admonition-title::after {
+  position: absolute;
+  top: 0.625em;
+  inset-inline-end: 1.6rem;
+  height: 2rem;
+  width: 2rem;
+  background-color: currentcolor;
+  mask-image: var(--md-details-icon);
+  -webkit-mask-image: var(--md-details-icon);
+  mask-repeat: no-repeat;
+  -webkit-mask-repeat: no-repeat;
+  mask-size: contain;
+  -webkit-mask-size: contain;
+  content: "";
+  transform: rotate(0deg);
+  transition: transform 0.25s;
+}
+details[open].admonition > summary.admonition-title::after {
+  transform: rotate(90deg);
+}
+
+:root {
+  --md-details-icon: url("data:image/svg+xml;charset=utf-8,<svg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 24 24'><path d='M8.59 16.58 13.17 12 8.59 7.41 10 6l6 6-6 6-1.41-1.42Z'/></svg>");
+}
+
+:root {
+  --md-admonition-icon--admonish-note: url("data:image/svg+xml;charset=utf-8,<svg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 24 24'><path d='M20.71 7.04c.39-.39.39-1.04 0-1.41l-2.34-2.34c-.37-.39-1.02-.39-1.41 0l-1.84 1.83 3.75 3.75M3 17.25V21h3.75L17.81 9.93l-3.75-3.75L3 17.25z'/></svg>");
+  --md-admonition-icon--admonish-abstract: url("data:image/svg+xml;charset=utf-8,<svg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 24 24'><path d='M17 9H7V7h10m0 6H7v-2h10m-3 6H7v-2h7M12 3a1 1 0 0 1 1 1 1 1 0 0 1-1 1 1 1 0 0 1-1-1 1 1 0 0 1 1-1m7 0h-4.18C14.4 1.84 13.3 1 12 1c-1.3 0-2.4.84-2.82 2H5a2 2 0 0 0-2 2v14a2 2 0 0 0 2 2h14a2 2 0 0 0 2-2V5a2 2 0 0 0-2-2z'/></svg>");
+  --md-admonition-icon--admonish-info: url("data:image/svg+xml;charset=utf-8,<svg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 24 24'><path d='M13 9h-2V7h2m0 10h-2v-6h2m-1-9A10 10 0 0 0 2 12a10 10 0 0 0 10 10 10 10 0 0 0 10-10A10 10 0 0 0 12 2z'/></svg>");
+  --md-admonition-icon--admonish-tip: url("data:image/svg+xml;charset=utf-8,<svg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 24 24'><path d='M17.66 11.2c-.23-.3-.51-.56-.77-.82-.67-.6-1.43-1.03-2.07-1.66C13.33 7.26 13 4.85 13.95 3c-.95.23-1.78.75-2.49 1.32-2.59 2.08-3.61 5.75-2.39 8.9.04.1.08.2.08.33 0 .22-.15.42-.35.5-.23.1-.47.04-.66-.12a.58.58 0 0 1-.14-.17c-1.13-1.43-1.31-3.48-.55-5.12C5.78 10 4.87 12.3 5 14.47c.06.5.12 1 .29 1.5.14.6.41 1.2.71 1.73 1.08 1.73 2.95 2.97 4.96 3.22 2.14.27 4.43-.12 6.07-1.6 1.83-1.66 2.47-4.32 1.53-6.6l-.13-.26c-.21-.46-.77-1.26-.77-1.26m-3.16 6.3c-.28.24-.74.5-1.1.6-1.12.4-2.24-.16-2.9-.82 1.19-.28 1.9-1.16 2.11-2.05.17-.8-.15-1.46-.28-2.23-.12-.74-.1-1.37.17-2.06.19.38.39.76.63 1.06.77 1 1.98 1.44 2.24 2.8.04.14.06.28.06.43.03.82-.33 1.72-.93 2.27z'/></svg>");
+  --md-admonition-icon--admonish-success: url("data:image/svg+xml;charset=utf-8,<svg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 24 24'><path d='m9 20.42-6.21-6.21 2.83-2.83L9 14.77l9.88-9.89 2.83 2.83L9 20.42z'/></svg>");
+  --md-admonition-icon--admonish-question: url("data:image/svg+xml;charset=utf-8,<svg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 24 24'><path d='m15.07 11.25-.9.92C13.45 12.89 13 13.5 13 15h-2v-.5c0-1.11.45-2.11 1.17-2.83l1.24-1.26c.37-.36.59-.86.59-1.41a2 2 0 0 0-2-2 2 2 0 0 0-2 2H8a4 4 0 0 1 4-4 4 4 0 0 1 4 4 3.2 3.2 0 0 1-.93 2.25M13 19h-2v-2h2M12 2A10 10 0 0 0 2 12a10 10 0 0 0 10 10 10 10 0 0 0 10-10c0-5.53-4.5-10-10-10z'/></svg>");
+  --md-admonition-icon--admonish-warning: url("data:image/svg+xml;charset=utf-8,<svg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 24 24'><path d='M13 14h-2V9h2m0 9h-2v-2h2M1 21h22L12 2 1 21z'/></svg>");
+  --md-admonition-icon--admonish-failure: url("data:image/svg+xml;charset=utf-8,<svg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 24 24'><path d='M20 6.91 17.09 4 12 9.09 6.91 4 4 6.91 9.09 12 4 17.09 6.91 20 12 14.91 17.09 20 20 17.09 14.91 12 20 6.91z'/></svg>");
+  --md-admonition-icon--admonish-danger: url("data:image/svg+xml;charset=utf-8,<svg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 24 24'><path d='M11 15H6l7-14v8h5l-7 14v-8z'/></svg>");
+  --md-admonition-icon--admonish-bug: url("data:image/svg+xml;charset=utf-8,<svg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 24 24'><path d='M14 12h-4v-2h4m0 6h-4v-2h4m6-6h-2.81a5.985 5.985 0 0 0-1.82-1.96L17 4.41 15.59 3l-2.17 2.17a6.002 6.002 0 0 0-2.83 0L8.41 3 7 4.41l1.62 1.63C7.88 6.55 7.26 7.22 6.81 8H4v2h2.09c-.05.33-.09.66-.09 1v1H4v2h2v1c0 .34.04.67.09 1H4v2h2.81c1.04 1.79 2.97 3 5.19 3s4.15-1.21 5.19-3H20v-2h-2.09c.05-.33.09-.66.09-1v-1h2v-2h-2v-1c0-.34-.04-.67-.09-1H20V8z'/></svg>");
+  --md-admonition-icon--admonish-example: url("data:image/svg+xml;charset=utf-8,<svg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 24 24'><path d='M7 13v-2h14v2H7m0 6v-2h14v2H7M7 7V5h14v2H7M3 8V5H2V4h2v4H3m-1 9v-1h3v4H2v-1h2v-.5H3v-1h1V17H2m2.25-7a.75.75 0 0 1 .75.75c0 .2-.08.39-.21.52L3.12 13H5v1H2v-.92L4 11H2v-1h2.25z'/></svg>");
+  --md-admonition-icon--admonish-quote: url("data:image/svg+xml;charset=utf-8,<svg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 24 24'><path d='M14 17h3l2-4V7h-6v6h3M6 17h3l2-4V7H5v6h3l-2 4z'/></svg>");
+}
+
+:is(.admonition):is(.admonish-note) {
+  border-color: #448aff;
+}
+
+:is(.admonish-note) > :is(.admonition-title, summary.admonition-title) {
+  background-color: rgba(68, 138, 255, 0.1);
+}
+:is(.admonish-note) > :is(.admonition-title, summary.admonition-title)::before {
+  background-color: #448aff;
+  mask-image: var(--md-admonition-icon--admonish-note);
+  -webkit-mask-image: var(--md-admonition-icon--admonish-note);
+  mask-repeat: no-repeat;
+  -webkit-mask-repeat: no-repeat;
+  mask-size: contain;
+  -webkit-mask-repeat: no-repeat;
+}
+
+:is(.admonition):is(.admonish-abstract, .admonish-summary, .admonish-tldr) {
+  border-color: #00b0ff;
+}
+
+:is(.admonish-abstract, .admonish-summary, .admonish-tldr) > :is(.admonition-title, summary.admonition-title) {
+  background-color: rgba(0, 176, 255, 0.1);
+}
+:is(.admonish-abstract, .admonish-summary, .admonish-tldr) > :is(.admonition-title, summary.admonition-title)::before {
+  background-color: #00b0ff;
+  mask-image: var(--md-admonition-icon--admonish-abstract);
+  -webkit-mask-image: var(--md-admonition-icon--admonish-abstract);
+  mask-repeat: no-repeat;
+  -webkit-mask-repeat: no-repeat;
+  mask-size: contain;
+  -webkit-mask-repeat: no-repeat;
+}
+
+:is(.admonition):is(.admonish-info, .admonish-todo) {
+  border-color: #00b8d4;
+}
+
+:is(.admonish-info, .admonish-todo) > :is(.admonition-title, summary.admonition-title) {
+  background-color: rgba(0, 184, 212, 0.1);
+}
+:is(.admonish-info, .admonish-todo) > :is(.admonition-title, summary.admonition-title)::before {
+  background-color: #00b8d4;
+  mask-image: var(--md-admonition-icon--admonish-info);
+  -webkit-mask-image: var(--md-admonition-icon--admonish-info);
+  mask-repeat: no-repeat;
+  -webkit-mask-repeat: no-repeat;
+  mask-size: contain;
+  -webkit-mask-repeat: no-repeat;
+}
+
+:is(.admonition):is(.admonish-tip, .admonish-hint, .admonish-important) {
+  border-color: #00bfa5;
+}
+
+:is(.admonish-tip, .admonish-hint, .admonish-important) > :is(.admonition-title, summary.admonition-title) {
+  background-color: rgba(0, 191, 165, 0.1);
+}
+:is(.admonish-tip, .admonish-hint, .admonish-important) > :is(.admonition-title, summary.admonition-title)::before {
+  background-color: #00bfa5;
+  mask-image: var(--md-admonition-icon--admonish-tip);
+  -webkit-mask-image: var(--md-admonition-icon--admonish-tip);
+  mask-repeat: no-repeat;
+  -webkit-mask-repeat: no-repeat;
+  mask-size: contain;
+  -webkit-mask-repeat: no-repeat;
+}
+
+:is(.admonition):is(.admonish-success, .admonish-check, .admonish-done) {
+  border-color: #00c853;
+}
+
+:is(.admonish-success, .admonish-check, .admonish-done) > :is(.admonition-title, summary.admonition-title) {
+  background-color: rgba(0, 200, 83, 0.1);
+}
+:is(.admonish-success, .admonish-check, .admonish-done) > :is(.admonition-title, summary.admonition-title)::before {
+  background-color: #00c853;
+  mask-image: var(--md-admonition-icon--admonish-success);
+  -webkit-mask-image: var(--md-admonition-icon--admonish-success);
+  mask-repeat: no-repeat;
+  -webkit-mask-repeat: no-repeat;
+  mask-size: contain;
+  -webkit-mask-repeat: no-repeat;
+}
+
+:is(.admonition):is(.admonish-question, .admonish-help, .admonish-faq) {
+  border-color: #64dd17;
+}
+
+:is(.admonish-question, .admonish-help, .admonish-faq) > :is(.admonition-title, summary.admonition-title) {
+  background-color: rgba(100, 221, 23, 0.1);
+}
+:is(.admonish-question, .admonish-help, .admonish-faq) > :is(.admonition-title, summary.admonition-title)::before {
+  background-color: #64dd17;
+  mask-image: var(--md-admonition-icon--admonish-question);
+  -webkit-mask-image: var(--md-admonition-icon--admonish-question);
+  mask-repeat: no-repeat;
+  -webkit-mask-repeat: no-repeat;
+  mask-size: contain;
+  -webkit-mask-repeat: no-repeat;
+}
+
+:is(.admonition):is(.admonish-warning, .admonish-caution, .admonish-attention) {
+  border-color: #ff9100;
+}
+
+:is(.admonish-warning, .admonish-caution, .admonish-attention) > :is(.admonition-title, summary.admonition-title) {
+  background-color: rgba(255, 145, 0, 0.1);
+}
+:is(.admonish-warning, .admonish-caution, .admonish-attention) > :is(.admonition-title, summary.admonition-title)::before {
+  background-color: #ff9100;
+  mask-image: var(--md-admonition-icon--admonish-warning);
+  -webkit-mask-image: var(--md-admonition-icon--admonish-warning);
+  mask-repeat: no-repeat;
+  -webkit-mask-repeat: no-repeat;
+  mask-size: contain;
+  -webkit-mask-repeat: no-repeat;
+}
+
+:is(.admonition):is(.admonish-failure, .admonish-fail, .admonish-missing) {
+  border-color: #ff5252;
+}
+
+:is(.admonish-failure, .admonish-fail, .admonish-missing) > :is(.admonition-title, summary.admonition-title) {
+  background-color: rgba(255, 82, 82, 0.1);
+}
+:is(.admonish-failure, .admonish-fail, .admonish-missing) > :is(.admonition-title, summary.admonition-title)::before {
+  background-color: #ff5252;
+  mask-image: var(--md-admonition-icon--admonish-failure);
+  -webkit-mask-image: var(--md-admonition-icon--admonish-failure);
+  mask-repeat: no-repeat;
+  -webkit-mask-repeat: no-repeat;
+  mask-size: contain;
+  -webkit-mask-repeat: no-repeat;
+}
+
+:is(.admonition):is(.admonish-danger, .admonish-error) {
+  border-color: #ff1744;
+}
+
+:is(.admonish-danger, .admonish-error) > :is(.admonition-title, summary.admonition-title) {
+  background-color: rgba(255, 23, 68, 0.1);
+}
+:is(.admonish-danger, .admonish-error) > :is(.admonition-title, summary.admonition-title)::before {
+  background-color: #ff1744;
+  mask-image: var(--md-admonition-icon--admonish-danger);
+  -webkit-mask-image: var(--md-admonition-icon--admonish-danger);
+  mask-repeat: no-repeat;
+  -webkit-mask-repeat: no-repeat;
+  mask-size: contain;
+  -webkit-mask-repeat: no-repeat;
+}
+
+:is(.admonition):is(.admonish-bug) {
+  border-color: #f50057;
+}
+
+:is(.admonish-bug) > :is(.admonition-title, summary.admonition-title) {
+  background-color: rgba(245, 0, 87, 0.1);
+}
+:is(.admonish-bug) > :is(.admonition-title, summary.admonition-title)::before {
+  background-color: #f50057;
+  mask-image: var(--md-admonition-icon--admonish-bug);
+  -webkit-mask-image: var(--md-admonition-icon--admonish-bug);
+  mask-repeat: no-repeat;
+  -webkit-mask-repeat: no-repeat;
+  mask-size: contain;
+  -webkit-mask-repeat: no-repeat;
+}
+
+:is(.admonition):is(.admonish-example) {
+  border-color: #7c4dff;
+}
+
+:is(.admonish-example) > :is(.admonition-title, summary.admonition-title) {
+  background-color: rgba(124, 77, 255, 0.1);
+}
+:is(.admonish-example) > :is(.admonition-title, summary.admonition-title)::before {
+  background-color: #7c4dff;
+  mask-image: var(--md-admonition-icon--admonish-example);
+  -webkit-mask-image: var(--md-admonition-icon--admonish-example);
+  mask-repeat: no-repeat;
+  -webkit-mask-repeat: no-repeat;
+  mask-size: contain;
+  -webkit-mask-repeat: no-repeat;
+}
+
+:is(.admonition):is(.admonish-quote, .admonish-cite) {
+  border-color: #9e9e9e;
+}
+
+:is(.admonish-quote, .admonish-cite) > :is(.admonition-title, summary.admonition-title) {
+  background-color: rgba(158, 158, 158, 0.1);
+}
+:is(.admonish-quote, .admonish-cite) > :is(.admonition-title, summary.admonition-title)::before {
+  background-color: #9e9e9e;
+  mask-image: var(--md-admonition-icon--admonish-quote);
+  -webkit-mask-image: var(--md-admonition-icon--admonish-quote);
+  mask-repeat: no-repeat;
+  -webkit-mask-repeat: no-repeat;
+  mask-size: contain;
+  -webkit-mask-repeat: no-repeat;
+}
+
+.navy :is(.admonition) {
+  background-color: var(--sidebar-bg);
+}
+
+.ayu :is(.admonition),
+.coal :is(.admonition) {
+  background-color: var(--theme-hover);
+}
+
+.rust :is(.admonition) {
+  background-color: var(--sidebar-bg);
+  color: var(--sidebar-fg);
+}
+.rust .admonition-anchor-link:link, .rust .admonition-anchor-link:visited {
+  color: var(--sidebar-fg);
+}
diff --git a/tvix/docs/src/SUMMARY.md b/tvix/docs/src/SUMMARY.md
index b0e47a0011..633c13683e 100644
--- a/tvix/docs/src/SUMMARY.md
+++ b/tvix/docs/src/SUMMARY.md
@@ -4,7 +4,32 @@
 - [Architecture & data flow](./architecture.md)
 - [TODOs](./TODO.md)
 
+# Evaluator
+- [Compilation of Bindings](./eval/bindings.md)
+- [Builtins](./eval/builtins.md)
+- [Build References](./eval/build-references.md)
+- [Catchable Errors](./eval/catchable-errors.md)
+- [Known Optimisation Potential](./eval/known-optimisation-potential.md)
+- [Language Issues](./eval/language-issues.md)
+- [Attrset Opcodes](./eval/opcodes-attrsets.md)
+- [Recursive attribute sets](./eval/recursive-attrs.md)
+- [VM Loop](./eval/vm-loop.md)
+- [Abandoned](./eval/abandoned/index.md)
+  - [Thread-local VM](./eval/abandoned/thread-local-vm.md)
+
+# Store
+- [Store API](./store/api.md)
+- [BlobStore Chunking](./castore/blobstore-chunking.md)
+- [BlobStore Protocol](./castore/blobstore-protocol.md)
+- [Data Model](./castore/data-model.md)
+- [Why not git trees?](./castore/why-not-git-trees.md)
+
 # Nix
 - [Specification of the Nix Language](./language-spec.md)
 - [Nix language version history](./lang-version.md)
 - [Value Pointer Equality](./value-pointer-equality.md)
+- [Daemon Protocol](./nix-daemon/index.md)
+  - [Changelog](./nix-daemon/changelog.md)
+  - [Logging](./nix-daemon/logging.md)
+  - [Operations](./nix-daemon/operations.md)
+  - [Serialization](./nix-daemon/serialization.md)
diff --git a/tvix/docs/src/TODO.md b/tvix/docs/src/TODO.md
index 8fb22ea822..92d7c4cace 100644
--- a/tvix/docs/src/TODO.md
+++ b/tvix/docs/src/TODO.md
@@ -25,17 +25,69 @@ sure noone is working on this, or has some specific design in mind already.
    with a different level of `--strict`, but the toplevel doc-comment suggests
    its generic?
 
+### crate2nix for WASM (@kranzes)
+Most of Tvix lives inside a `//tvix` cargo workspace, and we use `crate2nix`
+as a build system to get crate-level build granularity (and caching), keeping
+compile times somewhat manageable.
+
+In the future, for Store/Build, we want to build some more web frontends,
+exposing some data by calling into the API. Being able to write these in Rust,
+reusing most of our existing code dealing with the data structures, would
+be preferred.
+
+However, using the crate2nix tooling in combination with compiling for WASM is
+a bumpy ride (and `//web.tvixbolt` works around this by using
+`rustPlatform.buildRustPackage` instead, which invokes cargo inside a FOD):
+
+`buildRustCrate` in nixpkgs (which is used by `crate2nix` under the hood)
+doesn't allow specifying another `--target` explicitly, but relies on the cross
+machinery in nixpkgs exclusively.
+
+`doc/languages-frameworks/rust.section.md` suggests it should be a matter of
+re-instantiating nixpkgs for `wasm32-unknown-unknown`, but that's not
+recognized as a valid architecture.
+The suggested alternative, setting only `rustc.config` to it, seems to get us
+further, but the `Crate.nix` logic for detecting arch-conditional crates doesn't
+seem to cover that case, and tries to build crates (`cpufeatures` for `sha{1,2}`)
+which are supposed to be skipped.
+
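+A minimal sketch of the `rustc.config` approach (a hypothetical snippet; the
+exact attribute names nixpkgs' cross machinery expects may differ):
+
+```nix
+# Re-instantiate nixpkgs with only the rustc target changed.
+pkgsWasm = import pkgs.path {
+  localSystem = { inherit (pkgs.stdenv.hostPlatform) system; };
+  crossSystem = {
+    inherit (pkgs.stdenv.hostPlatform) system;
+    rustc.config = "wasm32-unknown-unknown";
+  };
+};
+```
+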
+## Perf
+ - String Contexts currently do a lot of indirections (edef)
+   (NixString -> NixStringInner -> HashSet[element] -> NixContextElement -> String -> data)
+   to get to the actual data. We should improve this. There are various ideas;
+   one of them is globally interning all Nix context elements, and only keeping
+   indices into that. We might need different representations for small and
+   large numbers of context elements, and tooling to reason about how many
+   contexts we have.
+ - To calculate NAR size and digest (used for output path calculation of FODs),
+   our current `SimpleRenderer` `NarCalculationService` sequentially asks for
+   one blob after another (and internally these might consist of multiple
+   chunks, too).
+   That's a lot of roundtrips, adding up to a lot of useless waiting.
+   While we cannot avoid having to feed all bytes sequentially through sha256,
+   we already know which blobs to fetch and in which order.
+   There should be a way to buffer some "amount of upcoming bytes" in memory,
+   rather than requesting them sequentially (see the sketch after this list).
+   This is somewhat the "spiritual counterpart" to our concurrent ingestion
+   code (`ConcurrentBlobUploader`, used by `ingest_nar`), which keeps
+   "some amount of outgoing bytes" in memory.
+
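+A rough sketch of the read-ahead idea (a hypothetical function, not the actual
+`NarCalculationService` API; assumes the `futures` and `sha2` crates):
+
+```rust
+use futures::{stream, StreamExt};
+use sha2::{Digest, Sha256};
+
+const LOOKAHEAD: usize = 4;
+
+/// Hash all blobs of a NAR in order, while fetching up to `LOOKAHEAD`
+/// upcoming blobs concurrently. `fetch` stands in for a blob lookup.
+async fn nar_sha256<F, Fut>(blob_digests: Vec<[u8; 32]>, fetch: F) -> [u8; 32]
+where
+    F: Fn([u8; 32]) -> Fut,
+    Fut: std::future::Future<Output = Vec<u8>>,
+{
+    let mut hasher = Sha256::new();
+    // `buffered` keeps up to LOOKAHEAD fetches in flight, but yields
+    // results in order, so the hash input stays sequential.
+    let blobs = stream::iter(blob_digests).map(fetch).buffered(LOOKAHEAD);
+    futures::pin_mut!(blobs);
+    while let Some(data) = blobs.next().await {
+        hasher.update(&data);
+    }
+    hasher.finalize().into()
+}
+```
+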
 ### Error cleanup
  - Currently, all services use tvix_castore::Error, which only has two kinds
    (invalid request, storage error), containing an (owned) string.
    This is quite primitive. We should have individual error types for BS, DS, PS.
    Maybe these should have some generics to still be able to carry errors from
    the underlying backend, similar to `IngestionError`.
+   There was an attempt to give PS separate error types (cl/11695), but this
+   ended up very verbose.
+   Every error had to be boxed, and a possible additional message added. Some
+   errors that didn't wrap another underlying error were hard to construct, too
+   (requiring the addition of new error variants). All of this without even
+   having added proper backtrace support, which would be quite helpful in
+   store hierarchies.
+   `anyhow`'s `.context()` gives us most of this out of the box. Maybe we can
+   use that, using enums rather than `&'static str` as context in some cases?
 
 ## Fixes towards correctness
- - `builtins.toXML` is missing string context. See b/398.
- - `builtins.toXML` self-closing tags need to be configurable in a more granular
-   fashion, requires third-party crate support. See b/399.
  - `rnix` only supports string source files, but `NixString` uses bytes (and Nix
    source code might be no valid UTF-8).
 
@@ -75,9 +127,14 @@ Some more fetcher-related builtins need work:
  - `fetchTree` (hairy, seems there's no proper spec and the URL syntax seems
    subject to change/underdocumented)
 
-### Convert builtins:fetchurl to Fetches
-We need to convert `builtins:fetchurl`-style calls to `builtins.derivation` to
-fetches, not Derivations (tracked in `KnownPaths`).
+### `builtins.path` roundtrip for flat
+`builtins.path` currently uses `filtered_ingest` also for the non-recursive
+case, then reads through the blob contents again to get the sha256.
+
+We should take care of assembling the root node on our own, and pipe the data
+through sha256 too (via `InspectReader`, see `glue/fetcher` for an example).
+
+This avoids some roundtrips, and is probably faster.
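+
+A sketch of the idea, assuming `tokio_util`'s `InspectReader` (the blob upload
+itself is elided here):
+
+```rust
+use sha2::{Digest, Sha256};
+use tokio::io::{self, AsyncRead};
+use tokio_util::io::InspectReader;
+
+/// Compute the flat sha256 of `r` while the data is being read, so no
+/// second pass over the blob contents is needed.
+async fn flat_sha256<R: AsyncRead + Unpin>(r: R) -> io::Result<[u8; 32]> {
+    let mut hasher = Sha256::new();
+    {
+        let mut r = InspectReader::new(r, |bytes| hasher.update(bytes));
+        // Here the bytes would be streamed into the BlobService; this
+        // sketch just drains the reader.
+        io::copy(&mut r, &mut io::sink()).await?;
+    }
+    Ok(hasher.finalize().into())
+}
+```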
 
 ### Derivation -> Build
 While we have some support for `structuredAttrs` and `fetchClosure` (at least
@@ -101,9 +158,33 @@ logs etc, but this is something requiring a lot of designing.
 
 ### Store composition
  - Combinators: list-by-priority, first-come-first-serve, cache
- - How do describe hierarchies. URL format too one-dimensional, but we might get
-   quite far with a similar "substituters" concept that Nix uses, to construct
-   the composed stores.
+ - Store composition hierarchies (@yuka).
+   - URL format too one-dimensional.
+   - We want to have nice and simple user-facing substituter config, including
+     sensible default wrappers for caching, retries, fallbacks, as well as
+     granular control for power-users.
+   - Current design idea:
+     - Have a concept similar to rclone config (a map with store aliases as
+       keys, allowing stores to be referred to by their alias from other
+       parts of the config).
+       It allows both referring to stores by name, as well as defining them
+       ad hoc: https://rclone.org/docs/#syntax-of-remote-paths
+     - Each store needs to be aware of its "instance name", so it can be
+       included in logs, metrics, …
+     - Have a "instantiation function" traversing such a config data structure,
+       creating store instances and plugging them together, ultimately returning
+       a dyn …Service interface.
+     - No reconfiguration/reconcilation for now
+     - Making URLs the primary data format would get ugly quite easily (hello
+       multiple layers of escaping!), so best to convert the existing URL
+       syntax to our new config format on the fly and then use one codepath
+       to instantiate/assemble. Similarly, something like the "user-facing
+       substituter config" mentioned above could also be converted to such a
+       config format under the hood.
+     - Maybe add `?cache=$other_url` parameter support to the URL syntax, to
+       easily wrap a store with a caching frontend, using $other_url as the
+       "near" store URL.
+
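+A hypothetical example of what such an aliased config could look like (the
+concrete format and field names are not decided yet):
+
+```toml
+# Each section defines a store instance, referred to by its alias.
+[blob_services.local]
+type = "objectstore"
+url = "file:///var/lib/tvix/blobs"
+
+[blob_services.remote]
+type = "grpc"
+url = "grpc+https://example.com"
+
+# A combinator wrapping the two, trying "near" first and caching into it.
+[blob_services.default]
+type = "cache"
+near = "local"
+far = "remote"
+```
+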
 ### Store Config
    There's already serde for some store options (bigtable uses `serde_qs`).
    We might also have common options global over all backends, like chunking
@@ -114,7 +195,22 @@ logs etc, but this is something requiring a lot of designing.
 ### BlobService
  - On the trait side, currently there's no way to distinguish reading a
    known-chunk vs blob, so we might be calling `.chunks()` unnecessarily often.
-   At least for the `object_store` backend, this might be a problem.
+   At least for the `object_store` backend, this might be a problem, causing a
+   lot of round-trips. It also doesn't compose well: every implementation of
+   `BlobService` needs to solve both the "holding metadata about chunking info"
+   and the "storing chunks" questions.
+   Design idea (@flokli), see the sketch at the end of this section: split
+   these two concerns into two separate traits:
+    - a `ChunkService` dealing with retrieving individual chunks, by their
+      content digests. Chunks are small enough to keep around in contiguous
+      memory.
+    - a `BlobService` storing metadata about blobs.
+
+   Individual stores would not need to implement `BlobReader` anymore, but that
+   could be a global thing with access to the whole store composition layer,
+   which should make it easier to reuse chunks from other backends. It's
+   unclear whether the write path should be structured the same way. At least
+   for some backends, we want the remote end to be able to decide about
+   chunking.
+
  - While `object_store` recently got support for `Content-Type`
    (https://github.com/apache/arrow-rs/pull/5650), there's no support on the
    local filesystem yet. We'd need to add support to this (through xattrs).
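+
+A rough sketch of the proposed trait split (names and signatures are
+hypothetical; `B3Digest` and `ChunkMeta` stand in for the existing castore
+types):
+
+```rust
+use std::io;
+
+type B3Digest = [u8; 32];
+struct ChunkMeta { digest: B3Digest, size: u64 }
+
+#[async_trait::async_trait]
+trait ChunkService {
+    /// Retrieve a single chunk by its content digest. Chunks are small
+    /// enough to keep around in contiguous memory.
+    async fn get_chunk(&self, digest: &B3Digest) -> io::Result<Option<Vec<u8>>>;
+}
+
+#[async_trait::async_trait]
+trait BlobService {
+    /// Return the chunking metadata for a blob, if the blob is known.
+    /// A global `BlobReader` on top of the store composition layer would
+    /// use this to fetch the individual chunks.
+    async fn chunks(&self, blob_digest: &B3Digest) -> io::Result<Option<Vec<ChunkMeta>>>;
+}
+```
+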
@@ -134,9 +230,10 @@ logs etc, but this is something requiring a lot of designing.
 - Some work ongoing on the worker operation parsing (griff, picnoir)
 
 ### O11Y
- - gRPC trace propagation (cl/10532)
- - `tracing-tracy` (cl/10952)
- - `[tracing-]indicatif` for progress/log reporting (floklis stash)
- - unification into `tvix-tracing` crate, currently a lot of boilerplate
-   in `tvix-store` CLI entrypoint, and half of the boilerplate copied over to
-   `tvix-cli`.
+ - Maybe drop `--log-level` entirely, and use the `RUST_LOG` env var
+   exclusively? `debug`/`trace` levels across all crates are a bit useless,
+   and `RUST_LOG` can be much more granular…
+ - Trace propagation for HTTP clients too, using
+   https://www.w3.org/TR/trace-context/ or https://www.w3.org/TR/baggage/,
+   whichever makes more sense.
+   Candidates: nix+http(s) protocol, object_store crates.
diff --git a/tvix/docs/src/architecture.md b/tvix/docs/src/architecture.md
index 5e0aa95f1a..02ffdfdcd2 100644
--- a/tvix/docs/src/architecture.md
+++ b/tvix/docs/src/architecture.md
@@ -21,6 +21,15 @@ gRPC. The rest of this document outlines the components.
 
 ### Coordinator
 
+```admonish warning
+Currently there's no separate coordinator. Most of the interaction between
+store, builder and evaluator is done by library code living in the `tvix-glue`
+crate (and `tvix-cli` is a user of it).
+
+Keep in mind that some of the statements below are outdated, and reflect
+neither reality nor the desired design anymore.
+```
+
 *Purpose:* The coordinator (in the simplest case, the Tvix CLI tool)
 oversees the flow of a build process and delegates tasks to the right
 subcomponents. For example, if a user runs the equivalent of
diff --git a/tvix/castore/docs/blobstore-chunking.md b/tvix/docs/src/castore/blobstore-chunking.md
index 49bbe69275..d8c3d54b52 100644
--- a/tvix/castore/docs/blobstore-chunking.md
+++ b/tvix/docs/src/castore/blobstore-chunking.md
@@ -73,11 +73,11 @@ This means one only needs to the root digest to validate a constructions, and th
 constructions can be sent [separately][bao-spec].
 
 This relieves us from the need of having to encode more granular chunking into
-our data model / identifier upfront, but can make this a mostly a transport/
+our data model / identifier upfront, but can make this mostly a transport/
 storage concern.
 
-For the some more description on the (remote) protocol, check
-`./blobstore-protocol.md`.
+For some more description on the (remote) protocol, check
+[BlobStore Protocol](./blobstore-protocol.md).
 
 #### Logical vs. physical chunking
 
diff --git a/tvix/castore/docs/blobstore-protocol.md b/tvix/docs/src/castore/blobstore-protocol.md
index 048cafc3d8..0dff787ccb 100644
--- a/tvix/castore/docs/blobstore-protocol.md
+++ b/tvix/docs/src/castore/blobstore-protocol.md
@@ -41,7 +41,7 @@ It also puts very little requirements on someone implementing a new
 The gRPC protocol is documented in `../protos/rpc_blobstore.proto`.
 Contrary to the `BlobService` trait, it does not have any options for seeking/
 ranging, as it's more desirable to provide this through chunking (see also
-`./blobstore-chunking.md`).
+[BlobStore Chunking](./blobstore-chunking.md).
 
 ## Composition
 Different `BlobStore` are supposed to be "composed"/"layered" to express
@@ -76,7 +76,7 @@ an additional additional field in the response, which would allow clients to
 populate their local chunk store in a single roundtrip.
 
 ## Verified Streaming
-As already described in `./docs/blobstore-chunking.md`, the physical chunk
+As already described in [BlobStore Chunking](./blobstore-chunking.md), the physical chunk
 information sent in a `BlobService.Stat()` response is still sufficient to fetch
 in an authenticated fashion.
 
diff --git a/tvix/castore/docs/data-model.md b/tvix/docs/src/castore/data-model.md
index 2df6761aae..7f7e396a22 100644
--- a/tvix/castore/docs/data-model.md
+++ b/tvix/docs/src/castore/data-model.md
@@ -2,7 +2,7 @@
 
 This provides some more notes on the fields used in castore.proto.
 
-See `//tvix/store/docs/api.md` for the full context.
+See [Store API](../store/api.md) for the full context.
 
 ## Directory message
 `Directory` messages use the blake3 hash of their canonical protobuf
@@ -15,8 +15,8 @@ a directory.
 
 All three message types have a `name` field, specifying the (base)name of the
 element (which MUST not contain slashes or null bytes, and MUST not be '.' or '..').
-For reproducibility reasons, the lists MUST be sorted by that name and also
-MUST be unique across all three lists.
+For reproducibility reasons, the lists MUST be sorted by that name and the
+name MUST be unique across all three lists.
 
 In addition to the `name` field, the various *Node messages have the following
 fields:
@@ -27,7 +27,7 @@ A `DirectoryNode` message represents a child directory.
 It has a `digest` field, which points to the identifier of another `Directory`
 message, making a `Directory` a merkle tree (or strictly speaking, a graph, as
 two elements pointing to a child directory with the same contents would point
-to the same `Directory` message.
+to the same `Directory` message).
 
 There's also a `size` field, containing the (total) number of all child
 elements in the referenced `Directory`, which helps for inode calculation.
diff --git a/tvix/castore/docs/why-not-git-trees.md b/tvix/docs/src/castore/why-not-git-trees.md
index fd46252cf5..4a12b4ef55 100644
--- a/tvix/castore/docs/why-not-git-trees.md
+++ b/tvix/docs/src/castore/why-not-git-trees.md
@@ -48,7 +48,7 @@ The git tree object format uses sha1 both for references to other trees and
 hashes of blobs, which isn't really a hash function to fundamentally base
 everything on in 2023.
 The [migration to sha256][git-sha256] also has been dead for some years now,
-and it's unclear how a "blake3" version of this would even look like.
+and it's unclear what a "blake3" version of this would even look like.
 
 [bao]: https://github.com/oconnor663/bao
 [blake3]: https://github.com/BLAKE3-team/BLAKE3
diff --git a/tvix/docs/src/eval/abandoned/index.md b/tvix/docs/src/eval/abandoned/index.md
new file mode 100644
index 0000000000..1cef704d08
--- /dev/null
+++ b/tvix/docs/src/eval/abandoned/index.md
@@ -0,0 +1,3 @@
+# Abandoned ideas
+
+This chapter keeps track of abandoned ideas, and why they were abandoned.
diff --git a/tvix/eval/docs/abandoned/thread-local-vm.md b/tvix/docs/src/eval/abandoned/thread-local-vm.md
index c6a2d5e07e..c6a2d5e07e 100644
--- a/tvix/eval/docs/abandoned/thread-local-vm.md
+++ b/tvix/docs/src/eval/abandoned/thread-local-vm.md
diff --git a/tvix/eval/docs/bindings.md b/tvix/docs/src/eval/bindings.md
index 2b062cb13d..e1c214e49f 100644
--- a/tvix/eval/docs/bindings.md
+++ b/tvix/docs/src/eval/bindings.md
@@ -62,7 +62,7 @@ This is done by compiling bindings in several phases:
 
    At the end of this phase, we know the stack slots of all namespaces for
    inheriting from, all values inherited from them, and all values (and
-   optionall keys) of bindings at the current level.
+   optionally keys) of bindings at the current level.
 
    Only statically known keys are actually merged, so any dynamic keys that
    conflict will lead to a "key already defined" error at runtime.
@@ -82,8 +82,10 @@ stack when the scope ends.
 
 ## Moving parts
 
-WARNING: This documents the *current* implementation. If you only care about the
+```admonish caution
+This documents the *current* implementation. If you only care about the
 conceptual aspects, see above.
+```
 
 There's a few types involved:
 
diff --git a/tvix/eval/docs/build-references.md b/tvix/docs/src/eval/build-references.md
index badcea1155..cfa569c04a 100644
--- a/tvix/eval/docs/build-references.md
+++ b/tvix/docs/src/eval/build-references.md
@@ -23,8 +23,10 @@ formats:
    This format is used for a special case where a derivation attribute directly
    refers to a derivation path (e.g. by accessing `.drvPath` on a derivation).
 
-   Note: In C++ Nix this case is quite special and actually requires a
-   store-database query during evaluation.
+   ```admonish note
+   In C++ Nix this case is quite special and actually requires a store-database
+   query during evaluation.
+   ```
 
 3. `<path>` - a non-descript store path input, usually a plain source file (e.g.
    from something like `src = ./.` or `src = ./foo.txt`).
@@ -90,8 +92,10 @@ C++ Nix has several builtins that interface directly with string contexts:
 * `unsafeDiscardOutputDependency`: drops dependencies on the *outputs* of a
   `.drv` in the context, passing only the literal `.drv` itself
 
-  Note: This is only used for special test-cases in nixpkgs, and deprecated Nix
+  ```admonish note
+  This is only used for special test-cases in nixpkgs, and deprecated Nix
   commands like `nix-push`.
+  ```
 * `getContext`: returns the string context in serialised form as a Nix attribute
   set
 * `appendContext`: adds a given string context to the string in the same format
@@ -159,8 +163,10 @@ one evaluation should be created in Nix. This metadata needs to be available in
 These queries will need to be asked of the metadata when populating the
 derivation fields.
 
-Note: Depending on how we implement `builtins.placeholder`, it might be useful
+```admonish note
+Depending on how we implement `builtins.placeholder`, it might be useful
 to track created placeholders in this metadata, too.
+```
 
 ### Context builtins
 
diff --git a/tvix/eval/docs/builtins.md b/tvix/docs/src/eval/builtins.md
index dba4c48c65..dba4c48c65 100644
--- a/tvix/eval/docs/builtins.md
+++ b/tvix/docs/src/eval/builtins.md
diff --git a/tvix/eval/docs/catchable-errors.md b/tvix/docs/src/eval/catchable-errors.md
index ce320a9217..ce320a9217 100644
--- a/tvix/eval/docs/catchable-errors.md
+++ b/tvix/docs/src/eval/catchable-errors.md
diff --git a/tvix/eval/docs/known-optimisation-potential.md b/tvix/docs/src/eval/known-optimisation-potential.md
index 0ab185fe1b..0ab185fe1b 100644
--- a/tvix/eval/docs/known-optimisation-potential.md
+++ b/tvix/docs/src/eval/known-optimisation-potential.md
diff --git a/tvix/eval/docs/language-issues.md b/tvix/docs/src/eval/language-issues.md
index 152e6594a1..152e6594a1 100644
--- a/tvix/eval/docs/language-issues.md
+++ b/tvix/docs/src/eval/language-issues.md
diff --git a/tvix/eval/docs/opcodes-attrsets.md b/tvix/docs/src/eval/opcodes-attrsets.md
index 7026f3319d..7026f3319d 100644
--- a/tvix/eval/docs/opcodes-attrsets.md
+++ b/tvix/docs/src/eval/opcodes-attrsets.md
diff --git a/tvix/eval/docs/recursive-attrs.md b/tvix/docs/src/eval/recursive-attrs.md
index c30cfd33e6..c30cfd33e6 100644
--- a/tvix/eval/docs/recursive-attrs.md
+++ b/tvix/docs/src/eval/recursive-attrs.md
diff --git a/tvix/eval/docs/vm-loop.md b/tvix/docs/src/eval/vm-loop.md
index 6266d34709..6266d34709 100644
--- a/tvix/eval/docs/vm-loop.md
+++ b/tvix/docs/src/eval/vm-loop.md
diff --git a/tvix/docs/src/language-spec.md b/tvix/docs/src/language-spec.md
index 0ff1dc491e..b3908b2cf4 100644
--- a/tvix/docs/src/language-spec.md
+++ b/tvix/docs/src/language-spec.md
@@ -1,8 +1,10 @@
 # Specification of the Nix Language
 
-WARNING: This document is a work in progress. Please keep an eye on
+```admonish attention
+This document is a work in progress. Please keep an eye on
 [`topic:nix-spec`](https://cl.tvl.fyi/q/topic:nix-spec) for ongoing
 CLs.
+```
 
 Nix is a general-purpose, functional programming language which this
 document aims to describe.
diff --git a/tvix/docs/src/nix-daemon/changelog.md b/tvix/docs/src/nix-daemon/changelog.md
new file mode 100644
index 0000000000..bc99dc6af0
--- /dev/null
+++ b/tvix/docs/src/nix-daemon/changelog.md
@@ -0,0 +1,202 @@
+# Changelog
+
+## Nix version protocol
+
+| Nix version     | Protocol |
+| --------------- | -------- |
+| 0.11            | 1.02     |
+| 0.12            | 1.04     |
+| 0.13            | 1.05     |
+| 0.14            | 1.05     |
+| 0.15            | 1.05     |
+| 0.16            | 1.06     |
+| 1.0             | 1.10     |
+| 1.1             | 1.11     |
+| 1.2             | 1.12     |
+| 1.3 - 1.5.3     | 1.13     |
+| 1.6 - 1.10      | 1.14     |
+| 1.11 - 1.11.16  | 1.15     |
+| 2.0 - 2.0.4     | 1.20     |
+| 2.1 - 2.3.18    | 1.21     |
+| 2.4 - 2.6.1     | 1.32     |
+| 2.7.0           | 1.33     |
+| 2.8.0 - 2.14.1  | 1.34     |
+| 2.15.0 - 2.19.4 | 1.35     |
+| 2.20.0 - 2.22.0 | 1.37     |
+
+In commit [be64fbb501][be64fbb501], support was dropped for protocol versions older than 1.10.
+This happened while the protocol was between 1.17 and 1.18, and was released with Nix 2.0.
+This means that no version of Nix 2.x can talk to Nix 0.x.
+
+## Operation History
+
+| Op              | Id | Commit         | Protocol | Nix Version | Notes |
+| --------------- | -- | -------------- | -------- | ----------- | ----- |
+| *Quit           | 0  | [a711689368][a711689368] || 0.11 | Became dead code in [7951c3c54][7951c3c54] (Nix 0.11) and removed in [d3c61d83b][d3c61d83b] (Nix 1.8) |
+| IsValidPath     | 1  | [a711689368][a711689368] || 0.11 ||
+| HasSubstitutes  | 3  | [0565b5f2b3][0565b5f2b3] || 0.11 ||
+| QueryPathHash   | 4  | [0565b5f2b3][0565b5f2b3] || 0.11 | Obsolete [e0204f8d46][e0204f8d46]<br>Nix 2.0 Protocol 1.16 |
+| QueryReferences | 5  | [0565b5f2b3][0565b5f2b3] || 0.11 | Obsolete [e0204f8d46][e0204f8d46]<br>Nix 2.0 Protocol 1.16 |
+| QueryReferrers  | 6  | [0565b5f2b3][0565b5f2b3] || 0.11 ||
+| AddToStore      | 7  | [0263279071][0263279071] || 0.11 ||
+| AddTextToStore  | 8  | [0263279071][0263279071] || 0.11 | Obsolete [c602ebfb34][c602ebfb34]<br>Nix 2.4 Protocol 1.25 |
+| BuildPaths      | 9  | [0565b5f2b3][0565b5f2b3] || 0.11 ||
+| EnsurePath      | 10 | [0565b5f2b3][0565b5f2b3] || 0.11 ||
+| AddTempRoot     | 11 | [e25fad691a][e25fad691a] || 0.11 ||
+| AddIndirectRoot | 12 | [74033a844f][74033a844f] || 0.11 ||
+| SyncWithGC      | 13 | [e25fad691a][e25fad691a] || 0.11 | Obsolete [9947f1646a][9947f1646a]<br> Nix 2.5.0 Protocol 1.32 |
+| FindRoots       | 14 | [29cf434a35][29cf434a35] || 0.11 ||
+| *CollectGarbage | 15 | [a9c4f66cfb][a9c4f66cfb] || 0.11 | Removed [a72709afd8][a72709afd8]<br>Nix 0.12 Protocol 1.02 |
+| ExportPath      | 16 | [0f5da8a83c][0f5da8a83c] || 0.11 | Obsolete [538a64e8c3][538a64e8c3]<br>Nix 2.0 Protocol 1.17 |
+| *ImportPath     | 17 | [0f5da8a83c][0f5da8a83c] || 0.11 | Removed [273b288a7e][273b288a7e]<br>Nix 1.0 Protocol 1.09 |
+| QueryDeriver    | 18 | [6d1a1191b0][6d1a1191b0] || 0.11 | Obsolete [e0204f8d46][e0204f8d46]<br>Nix 2.0 Protocol 1.16 |
+| SetOptions      | 19 | [f3441e6122][f3441e6122] || 0.11 ||
+| CollectGarbage              | 20 | [a72709afd8][a72709afd8] | 1.02  | 0.12   ||
+| QuerySubstitutablePathInfo  | 21 | [03427e76f1][03427e76f1] | 1.02  | 0.12   ||
+| QueryDerivationOutputs      | 22 | [e42401ee7b][e42401ee7b] | 1.05  | 1.0    | Obsolete [d38f860c3e][d38f860c3e]<br>Nix 2.4 Protocol 1.22* |
+| QueryAllValidPaths          | 23 | [24035b98b1][24035b98b1] | 1.05  | 1.0    ||
+| *QueryFailedPaths            | 24 | [f92c9a0ac5][f92c9a0ac5] | 1.05  | 1.0    | Removed [8cffec848][8cffec848]<br>Nix 2.0 Protocol 1.16 |
+| *ClearFailedPaths            | 25 | [f92c9a0ac5][f92c9a0ac5] | 1.05  | 1.0    | Removed [8cffec848][8cffec848]<br>Nix 2.0 Protocol 1.16 |
+| QueryPathInfo               | 26 | [1db6259076][1db6259076] | 1.06  | 1.0    ||
+| ImportPaths                 | 27 | [273b288a7e][273b288a7e] | 1.09  | 1.0    | Obsolete [538a64e8c3][538a64e8c3]<br>Nix 2.0 Protocol 1.17 |
+| QueryDerivationOutputNames  | 28 | [af2e53fd48][af2e53fd48]<br>([194d21f9f6][194d21f9f6]) | 1.08      | 1.0 | Obsolete<br>[045b07200c][045b07200c]<br>Nix 2.4 Protocol 1.21 |
+| QueryPathFromHashPart       | 29 | [ccc52adfb2][ccc52adfb2] | 1.11  | 1.1    ||
+| QuerySubstitutablePathInfos | 30 | [eb3036da87][eb3036da87] | 1.12* | 1.2    ||
+| QueryValidPaths             | 31 | [58ef4d9a95][58ef4d9a95] | 1.12  | 1.2    ||
+| QuerySubstitutablePaths     | 32 | [09a6321aeb][09a6321aeb] | 1.12  | 1.2    ||
+| QueryValidDerivers          | 33 | [2754a07ead][2754a07ead] | 1.13* | 1.3    ||
+| OptimiseStore               | 34 | [8fb8c26b6d][8fb8c26b6d] | 1.14  | 1.8    ||
+| VerifyStore                 | 35 | [b755752f76][b755752f76] | 1.14  | 1.9    ||
+| BuildDerivation             | 36 | [71a5161365][71a5161365] | 1.14  | 1.10   ||
+| AddSignatures               | 37 | [d0f5719c2a][d0f5719c2a] | 1.16  | 2.0    ||
+| NarFromPath                 | 38 | [b4b5e9ce2f][b4b5e9ce2f] | 1.17  | 2.0    ||
+| AddToStoreNar               | 39 | [584f8a62de][584f8a62de] | 1.17  | 2.0    ||
+| QueryMissing                | 40 | [ba20730b3f][ba20730b3f] | 1.19* | 2.0    ||
+| QueryDerivationOutputMap    | 41 | [d38f860c3e][d38f860c3e] | 1.22* | 2.4    ||
+| RegisterDrvOutput           | 42 | [58cdab64ac][58cdab64ac] | 1.27  | 2.4    ||
+| QueryRealisation            | 43 | [58cdab64ac][58cdab64ac] | 1.27  | 2.4    ||
+| AddMultipleToStore          | 44 | [fe1f34fa60][fe1f34fa60] | 1.32* | 2.4    ||
+| AddBuildLog                 | 45 | [4dda1f92aa][4dda1f92aa] | 1.32  | 2.6.0  ||
+| BuildPathsWithResults       | 46 | [a4604f1928][a4604f1928] | 1.34* | 2.8.0  ||
+| AddPermRoot                 | 47 | [226b0f3956][226b0f3956] | 1.36* | 2.20.0 ||
+
+Notes: Ops marked with * have been removed.
+A protocol version marked with * was bumped while adding that operation.
+Otherwise the listed protocol version refers to the protocol version at the
+time the operation was added (so only from the next protocol version on can
+you assume the operation is present/removed/obsolete, since it was
+added/removed/obsoleted between protocol versions).
+
+## Protocol version change log
+
+- 1.01 [f3441e6122][f3441e6122] Initial Version
+- 1.02 [c370755583][c370755583] Use build hook
+- 1.03 [db4f4a8425][db4f4a8425] Backward compatibility check
+- 1.04 [96598e7b06][96598e7b06] SetOptions buildVerbosity
+- 1.05 [60ec75048a][60ec75048a] SetOptions useAtime & maxAtime
+- 1.06 [6846ed8b44][6846ed8b44] SetOptions buildCores
+- 1.07 [bdf089f463][bdf089f463] QuerySubstitutablePathInfo narSize
+- 1.08 [b1eb252172][b1eb252172] STDERR_ERROR exit status
+- 1.09 [e0bd307802][e0bd307802] ImportPath not supported on versions older than 1.09
+- 1.10 [db5b86ef13][db5b86ef13] SetOptions build-use-substitutes
+- 1.11 [4bc4da331a][4bc4da331a] open connection reserveSpace
+- 1.12 [eb3036da87][eb3036da87] Implement QuerySubstitutablePathInfos
+- 1.13 [2754a07ead][2754a07ead] Implement QueryValidDerivers
+- 1.14 [a583a2bc59][a583a2bc59] open connection cpu affinity
+- 1.15 [d1e3bf01bc][d1e3bf01bc] BuildPaths buildMode
+- 1.16 [9cee600c88][9cee600c88] QueryPathInfo ultimate & sigs
+- 1.17 [ddea253ff8][ddea253ff8] QueryPathInfo returns valid bool
+- 1.18 [4b8f1b0ec0][4b8f1b0ec0] Select between AddToStoreNar and ImportPaths
+- 1.19 [ba20730b3f][ba20730b3f] Implement QueryMissing
+- 1.20 [cfc8132391][cfc8132391] Don't send activity and result logs to old clients
+- 1.21 [6185d25e52][6185d25e52] AddToStoreNar uses TunnelLogger for data
+- 1.22 [d38f860c3e][d38f860c3e] Implement QueryDerivationOutputMap and obsolete QueryDerivationOutputs
+- 1.23 [4c0077a07d][4c0077a07d] AddToStoreNar uses FramedSink/-Source for data
+- 1.24 [5ccd94501d][5ccd94501d] Allow trustless building of CA derivations
+- 1.25 [e34fe47d0c][e34fe47d0c] New implementation of AddToStore
+- 1.26 [c43e882f54][c43e882f54] STDERR_ERROR serialize exception
+- 1.27 [3a63fc6cd5][3a63fc6cd5] QueryValidPaths substitute flag
+- 1.28 [27b5747ca7][27b5747ca7] BuildDerivation returns builtOutputs
+- 1.29 [9d309de0de][9d309de0de] BuildDerivation returns timesBuilt, isNonDeterministic, startTime & stopTime
+- 1.30 [e5951a6b2f][e5951a6b2f] Bump version number for DerivedPath changes
+- 1.31 [a8416866cf][a8416866cf] RegisterDrvOutput & QueryRealisation send realisations as JSON
+- 1.32 [fe1f34fa60][fe1f34fa60] Implement AddMultipleToStore
+- 1.33 [35dbdbedd4][35dbdbedd4] open connection sends nix version
+- 1.34 [a4604f1928][a4604f1928] Implement BuildPathsWithResults
+- 1.35 [9207f94582][9207f94582] open connection sends trusted option
+- 1.36 [226b0f3956][226b0f3956] Implement AddPermRoot
+- 1.37 [1e3d811840][1e3d811840] Serialize BuildResult send cpuUser & cpuSystem
+
+
+
+[0263279071]: https://github.com/NixOS/nix/commit/0263279071
+[03427e76f1]: https://github.com/NixOS/nix/commit/03427e76f1
+[045b07200c]: https://github.com/NixOS/nix/commit/045b07200c
+[0565b5f2b3]: https://github.com/NixOS/nix/commit/0565b5f2b3
+[09a6321aeb]: https://github.com/NixOS/nix/commit/09a6321aeb
+[0f5da8a83c]: https://github.com/NixOS/nix/commit/0f5da8a83c
+[194d21f9f6]: https://github.com/NixOS/nix/commit/194d21f9f6
+[1db6259076]: https://github.com/NixOS/nix/commit/1db6259076
+[1e3d811840]: https://github.com/NixOS/nix/commit/1e3d811840
+[24035b98b1]: https://github.com/NixOS/nix/commit/24035b98b1
+[226b0f3956]: https://github.com/NixOS/nix/commit/226b0f3956
+[273b288a7e]: https://github.com/NixOS/nix/commit/273b288a7e
+[2754a07ead]: https://github.com/NixOS/nix/commit/2754a07ead
+[27b5747ca7]: https://github.com/NixOS/nix/commit/27b5747ca7
+[29cf434a35]: https://github.com/NixOS/nix/commit/29cf434a35
+[35dbdbedd4]: https://github.com/NixOS/nix/commit/35dbdbedd4
+[3a63fc6cd5]: https://github.com/NixOS/nix/commit/3a63fc6cd5
+[4b8f1b0ec0]: https://github.com/NixOS/nix/commit/4b8f1b0ec0
+[4bc4da331a]: https://github.com/NixOS/nix/commit/4bc4da331a
+[4c0077a07d]: https://github.com/NixOS/nix/commit/4c0077a07d
+[4dda1f92aa]: https://github.com/NixOS/nix/commit/4dda1f92aa
+[538a64e8c3]: https://github.com/NixOS/nix/commit/538a64e8c3
+[584f8a62de]: https://github.com/NixOS/nix/commit/584f8a62de
+[58cdab64ac]: https://github.com/NixOS/nix/commit/58cdab64ac
+[58ef4d9a95]: https://github.com/NixOS/nix/commit/58ef4d9a95
+[5ccd94501d]: https://github.com/NixOS/nix/commit/5ccd94501d
+[60ec75048a]: https://github.com/NixOS/nix/commit/60ec75048a
+[6185d25e52]: https://github.com/NixOS/nix/commit/6185d25e52
+[6846ed8b44]: https://github.com/NixOS/nix/commit/6846ed8b44
+[6d1a1191b0]: https://github.com/NixOS/nix/commit/6d1a1191b0
+[71a5161365]: https://github.com/NixOS/nix/commit/71a5161365
+[74033a844f]: https://github.com/NixOS/nix/commit/74033a844f
+[7951c3c54]: https://github.com/NixOS/nix/commit/7951c3c54
+[8cffec848]: https://github.com/NixOS/nix/commit/8cffec848
+[8fb8c26b6d]: https://github.com/NixOS/nix/commit/8fb8c26b6d
+[9207f94582]: https://github.com/NixOS/nix/commit/9207f94582
+[96598e7b06]: https://github.com/NixOS/nix/commit/96598e7b06
+[9947f1646a]: https://github.com/NixOS/nix/commit/9947f1646a
+[9cee600c88]: https://github.com/NixOS/nix/commit/9cee600c88
+[9d309de0de]: https://github.com/NixOS/nix/commit/9d309de0de
+[a4604f1928]: https://github.com/NixOS/nix/commit/a4604f1928
+[a583a2bc59]: https://github.com/NixOS/nix/commit/a583a2bc59
+[a711689368]: https://github.com/NixOS/nix/commit/a711689368
+[a72709afd8]: https://github.com/NixOS/nix/commit/a72709afd8
+[a8416866cf]: https://github.com/NixOS/nix/commit/a8416866cf
+[a9c4f66cfb]: https://github.com/NixOS/nix/commit/a9c4f66cfb
+[af2e53fd48]: https://github.com/NixOS/nix/commit/af2e53fd48
+[b1eb252172]: https://github.com/NixOS/nix/commit/b1eb252172
+[b4b5e9ce2f]: https://github.com/NixOS/nix/commit/b4b5e9ce2f
+[b755752f76]: https://github.com/NixOS/nix/commit/b755752f76
+[ba20730b3f]: https://github.com/NixOS/nix/commit/ba20730b3f
+[bdf089f463]: https://github.com/NixOS/nix/commit/bdf089f463
+[be64fbb501]: https://github.com/NixOS/nix/commit/be64fbb501
+[c370755583]: https://github.com/NixOS/nix/commit/c370755583
+[c43e882f54]: https://github.com/NixOS/nix/commit/c43e882f54
+[c602ebfb34]: https://github.com/NixOS/nix/commit/c602ebfb34
+[ccc52adfb2]: https://github.com/NixOS/nix/commit/ccc52adfb2
+[cfc8132391]: https://github.com/NixOS/nix/commit/cfc8132391
+[d0f5719c2a]: https://github.com/NixOS/nix/commit/d0f5719c2a
+[d1e3bf01bc]: https://github.com/NixOS/nix/commit/d1e3bf01bc
+[d38f860c3e]: https://github.com/NixOS/nix/commit/d38f860c3e
+[d3c61d83b]: https://github.com/NixOS/nix/commit/d3c61d83b
+[db4f4a8425]: https://github.com/NixOS/nix/commit/db4f4a8425
+[db5b86ef13]: https://github.com/NixOS/nix/commit/db5b86ef13
+[ddea253ff8]: https://github.com/NixOS/nix/commit/ddea253ff8
+[e0204f8d46]: https://github.com/NixOS/nix/commit/e0204f8d46
+[e0bd307802]: https://github.com/NixOS/nix/commit/e0bd307802
+[e25fad691a]: https://github.com/NixOS/nix/commit/e25fad691a
+[e34fe47d0c]: https://github.com/NixOS/nix/commit/e34fe47d0c
+[e42401ee7b]: https://github.com/NixOS/nix/commit/e42401ee7b
+[e5951a6b2f]: https://github.com/NixOS/nix/commit/e5951a6b2f
+[eb3036da87]: https://github.com/NixOS/nix/commit/eb3036da87
+[f3441e6122]: https://github.com/NixOS/nix/commit/f3441e6122
+[f92c9a0ac5]: https://github.com/NixOS/nix/commit/f92c9a0ac5
+[fe1f34fa60]: https://github.com/NixOS/nix/commit/fe1f34fa60
diff --git a/tvix/docs/src/nix-daemon/index.md b/tvix/docs/src/nix-daemon/index.md
new file mode 100644
index 0000000000..e47c20151e
--- /dev/null
+++ b/tvix/docs/src/nix-daemon/index.md
@@ -0,0 +1,15 @@
+# Nix Daemon Protocol
+
+The Nix Daemon protocol is used to communicate with the `nix-daemon`,
+either on the local system (in which case the communication happens via a Unix
+domain socket), or with a remote Nix (in which case it is tunneled over SSH).
+
+It uses a custom binary format which isn't well documented. The subpages here
+serve as in-depth documentation of some of its inner workings, data types,
+etc.
+
+A first implementation of this exists in
+[griff/Nix.rs](https://github.com/griff/Nix.rs/tree/main).
+
+Work is underway to port / factor this out into reusable building blocks in
+the [nix-compat] crate.
diff --git a/tvix/docs/src/nix-daemon/logging.md b/tvix/docs/src/nix-daemon/logging.md
new file mode 100644
index 0000000000..70fe0882fa
--- /dev/null
+++ b/tvix/docs/src/nix-daemon/logging.md
@@ -0,0 +1,122 @@
+# Logging
+
+Because the daemon protocol only has one sender stream and one receiver
+stream, logging messages need to be carefully interleaved with requests and
+responses. Usually this means that after the operation and all of its inputs
+(the request) have been read, logging hijacks the sender stream (in the server
+case) and uses it to send typed logging messages while the request is being
+processed. Once the response has been generated, `STDERR_LAST` is sent to mark
+that what follows is the response data to the request. If the request failed,
+a `STDERR_ERROR` message is sent with the error instead, and no response is
+sent.
+
+Outside of this window (i.e. while not between reading a request and sending
+its response), all messages and activities are buffered until the next time
+the logger can send data. A client-side sketch of draining these messages
+follows the list below.
+
+The logging messages supported are:
+- `STDERR_LAST`
+- `STDERR_ERROR`
+- `STDERR_NEXT`
+- `STDERR_READ`
+- `STDERR_WRITE`
+- `STDERR_START_ACTIVITY`
+- `STDERR_STOP_ACTIVITY`
+- `STDERR_RESULT`
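+
+As a minimal client-side sketch (an illustration only, not the tvix or Nix.rs
+implementation; the helpers follow the rules in
+[serialization](./serialization.md)), draining the logger stream until the
+response looks roughly like this:
+
+```rust
+use std::io::{self, Read};
+
+const STDERR_LAST: u64 = 0x616c7473;
+const STDERR_ERROR: u64 = 0x63787470;
+const STDERR_NEXT: u64 = 0x6f6c6d67;
+
+// Read a little-endian UInt64.
+fn read_u64(r: &mut impl Read) -> io::Result<u64> {
+    let mut buf = [0u8; 8];
+    r.read_exact(&mut buf)?;
+    Ok(u64::from_le_bytes(buf))
+}
+
+// Read a Bytes value: length, content, zero padding to an 8-byte boundary.
+fn read_bytes(r: &mut impl Read) -> io::Result<Vec<u8>> {
+    let len = read_u64(r)? as usize;
+    let mut buf = vec![0u8; len];
+    r.read_exact(&mut buf)?;
+    let mut pad = [0u8; 8];
+    r.read_exact(&mut pad[..(8 - len % 8) % 8])?;
+    Ok(buf)
+}
+
+// Drain logger messages until STDERR_LAST; afterwards the caller reads the
+// actual response from the same stream. Unhandled message types are rejected
+// for brevity; a full client handles all of the types listed above.
+fn drain_logs(r: &mut impl Read) -> io::Result<()> {
+    loop {
+        match read_u64(r)? {
+            STDERR_LAST => return Ok(()),
+            STDERR_NEXT => {
+                let msg = read_bytes(r)?;
+                eprintln!("{}", String::from_utf8_lossy(&msg));
+            }
+            STDERR_ERROR => {
+                // A full client reads the Error (or msg + exitStatus) here;
+                // no response follows.
+                return Err(io::Error::new(io::ErrorKind::Other, "daemon error"));
+            }
+            other => {
+                return Err(io::Error::new(
+                    io::ErrorKind::InvalidData,
+                    format!("unhandled log message type {other:#x}"),
+                ));
+            }
+        }
+    }
+}
+```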
+
+
+### `STDERR_LAST`
+Marks the end of the logs, normal processing can resume.
+
+- 0x616c7473 :: [UInt64][se-UInt64] (hardcoded)
+
+### `STDERR_ERROR`
+This also marks the end of this log "session" and so it
+has the same effect as `STDERR_LAST`.
+On the client the error is thrown as an exception and no response is read.
+
+#### If protocol version is 1.26 or newer
+- 0x63787470 :: [UInt64][se-UInt64] (hardcoded)
+- error :: [Error][se-Error]
+
+#### If protocol version is older than 1.26
+- 0x63787470 :: [UInt64][se-UInt64] (hardcoded)
+- msg :: [String][se-String]
+- exitStatus :: [Int][se-Int]
+
+
+### `STDERR_NEXT`
+Normal string log message.
+
+- 0x6f6c6d67 :: [UInt64][se-UInt64] (hardcoded)
+- msg :: [String][se-String]
+
+
+### `STDERR_READ`
+Reader interface used by ImportPaths and AddToStoreNar (between protocol 1.21
+and 1.23). It works by sending a desired buffer length and then reading a
+bytes buffer of up to that length from the receiver stream. Receiving 0 bytes
+is treated as an unexpected EOF. A server-side sketch follows the field list
+below.
+
+- 0x64617461 :: [UInt64][se-UInt64] (hardcoded)
+- desiredLen :: [Size][se-Size]
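+
+As an illustration of the server side (a sketch only; `read_bytes` is the
+hypothetical helper from the sketch above):
+
+```rust
+use std::io::{self, Read, Write};
+
+fn write_u64(w: &mut impl Write, n: u64) -> io::Result<()> {
+    w.write_all(&n.to_le_bytes())
+}
+
+// Ask the client for up to `desired` bytes; an empty reply is an unexpected
+// EOF. `read_bytes` is as defined in the sketch above; the client replies
+// with a regular (padded) Bytes value.
+fn stderr_read(
+    to: &mut impl Write,
+    from: &mut impl Read,
+    desired: u64,
+) -> io::Result<Vec<u8>> {
+    write_u64(to, 0x64617461)?; // STDERR_READ
+    write_u64(to, desired)?;    // desiredLen :: Size
+    to.flush()?;
+    let buf = read_bytes(from)?;
+    if buf.is_empty() {
+        return Err(io::Error::new(io::ErrorKind::UnexpectedEof, "unexpected EOF"));
+    }
+    Ok(buf)
+}
+```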
+
+### `STDERR_WRITE`
+Writer interface used by ExportPath. Simply writes a buffer.
+
+- 0x64617416 :: [UInt64][se-UInt64] (hardcoded)
+- buffer :: [Bytes][se-Bytes]
+
+### `STDERR_START_ACTIVITY`
+Begins an activity. In other tracing frameworks this would be called a span.
+
+Implemented in protocol 1.20. For backwards compatibility with older versions
+of the protocol, instead of sending an `STDERR_START_ACTIVITY` the level is
+checked against the enabled logging level, and the text field is sent as a
+simple log message with `STDERR_NEXT`.
+
+- 0x53545254 :: [UInt64][se-UInt64] (hardcoded)
+- act :: [UInt64][se-UInt64]
+- level :: [Verbosity][se-Verbosity]
+- type :: [ActivityType][se-ActivityType]
+- text :: [String][se-String]
+- fields :: [List][se-List] of [Field][se-Field]
+- parent :: [UInt64][se-UInt64]
+
+
+The `act` id is generated atomically on the sender side as
+`nextId++ + (getPid() << 32)`.
+
+
+### `STDERR_STOP_ACTIVITY`
+Stops the given activity. No further results should be sent for that
+activity id. Sends just the activity id.
+
+Implemented in protocol 1.20. When talking to older versions of the protocol
+this message is simply not sent.
+
+- 0x53544f50 :: [UInt64][se-UInt64] (hardcoded)
+- act :: [UInt64][se-UInt64]
+
+
+### `STDERR_RESULT`
+Sends results for a given activity.
+
+Implemented in protocol 1.20. When talking to older versions of the protocol
+this message is simply not sent.
+
+- 0x52534c54 :: [UInt64][se-UInt64] (hardcoded)
+- act :: [UInt64][se-UInt64]
+- type :: [ResultType][se-ResultType]
+- fields :: [List][se-List] of [Field][se-Field]
+
+
+
+
+[se-UInt64]: ./serialization.md#uint64
+[se-Int]: ./serialization.md#int
+[se-Size]: ./serialization.md#size
+[se-Verbosity]: ./serialization.md#verbosity
+[se-ActivityType]: ./serialization.md#activitytype
+[se-ResultType]: ./serialization.md#resulttype
+[se-Bytes]: ./serialization.md#bytes
+[se-String]: ./serialization.md#string
+[se-List]: ./serialization.md#list-of-x
+[se-Error]: ./serialization.md#error
+[se-Field]: ./serialization.md#field
\ No newline at end of file
diff --git a/tvix/docs/src/nix-daemon/operations.md b/tvix/docs/src/nix-daemon/operations.md
new file mode 100644
index 0000000000..0683ab0709
--- /dev/null
+++ b/tvix/docs/src/nix-daemon/operations.md
@@ -0,0 +1,894 @@
+
+# TOC
+
+| Operation                                                   | Id |
+| ----------------------------------------------------------- | -- |
+| [IsValidPath](#isvalidpath)                                 | 1  |
+| [HasSubstitutes](#hassubstitutes)                           | 3  |
+| [QueryReferrers](#queryreferrers)                           | 6  |
+| [AddToStore](#addtostore)                                   | 7  |
+| [BuildPaths](#buildpaths)                                   | 9  |
+| [EnsurePath](#ensurepath)                                   | 10 |
+| [AddTempRoot](#addtemproot)                                 | 11 |
+| [AddIndirectRoot](#addindirectroot)                         | 12 |
+| [FindRoots](#findroots)                                     | 14 |
+| [SetOptions](#setoptions)                                   | 19 |
+| [CollectGarbage](#collectgarbage)                           | 20 |
+| [QuerySubstitutablePathInfo](#querysubstitutablepathinfo)   | 21 |
+| [QueryAllValidPaths](#queryallvalidpaths)                   | 23 |
+| [QueryPathInfo](#querypathinfo)                             | 26 |
+| [QueryPathFromHashPart](#querypathfromhashpart)             | 29 |
+| [QuerySubstitutablePathInfos](#querysubstitutablepathinfos) | 30 |
+| [QueryValidPaths](#queryvalidpaths)                         | 31 |
+| [QuerySubstitutablePaths](#querysubstitutablepaths)         | 32 |
+| [QueryValidDerivers](#queryvalidderivers)                   | 33 |
+| [OptimiseStore](#optimisestore)                             | 34 |
+| [VerifyStore](#verifystore)                                 | 35 |
+| [BuildDerivation](#buildderivation)                         | 36 |
+| [AddSignatures](#addsignatures)                             | 37 |
+| [NarFromPath](#narfrompath)                                 | 38 |
+| [AddToStoreNar](#addtostorenar)                             | 39 |
+| [QueryMissing](#querymissing)                               | 40 |
+| [QueryDerivationOutputMap](#queryderivationoutputmap)       | 41 |
+| [RegisterDrvOutput](#registerdrvoutput)                     | 42 |
+| [QueryRealisation](#queryrealisation)                       | 43 |
+| [AddMultipleToStore](#addmultipletostore)                   | 44 |
+| [AddBuildLog](#addbuildlog)                                 | 45 |
+| [BuildPathsWithResults](#buildpathswithresults)             | 46 |
+| [AddPermRoot](#addpermroot)                                 | 47 |
+
+
+## Obsolete operations
+
+| Operation                                                 | Id |
+| --------------------------------------------------------- | -- |
+| [QueryPathHash](#querypathhash)                           | 4  |
+| [QueryReferences](#queryreferences)                       | 5  |
+| [AddTextToStore](#addtexttostore)                         | 8  |
+| [SyncWithGC](#syncwithgc)                                 | 13 |
+| [ExportPath](#exportpath)                                 | 16 |
+| [QueryDeriver](#queryderiver)                             | 18 |
+| [QueryDerivationOutputs](#queryderivationoutputs)         | 22 |
+| [ImportPaths](#importpaths)                               | 27 |
+| [QueryDerivationOutputNames](#queryderivationoutputnames) | 28 |
+
+
+## Removed operations
+
+| Operation                                         | Id |
+| ------------------------------------------------- | -- |
+| [Quit](#quit-removed)                             | 0  |
+| [ImportPath](#importpath-removed)                 | 17 |
+| [Old CollectGarbage](#old-collectgarbage-removed) | 15 |
+| [QueryFailedPaths](#queryfailedpaths-removed)     | 24 |
+| [ClearFailedPaths](#clearfailedpaths-removed)     | 25 |
+
+
+
+## Quit (removed)
+
+**Id:** 0<br>
+**Introduced:** Nix 0.11<br>
+**Removed:** Became dead code in Nix 0.11 and removed in Nix 1.8
+
+
+## IsValidPath
+
+**Id:** 1<br>
+**Introduced:** Nix 0.11<br>
+
+As the name says, checks whether a store path is valid, i.e. registered in
+the store.
+
+This is a pretty core operation used everywhere. A hedged sketch of encoding
+this request is shown below.
+
+### Inputs
+path :: [StorePath][se-StorePath]
+
+### Outputs
+isValid :: [Bool][se-Bool]
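+
+As an illustrative sketch (the helpers here are hypothetical, not the tvix or
+Nix.rs API; see [serialization](./serialization.md) for the wire rules),
+encoding an IsValidPath request could look like this:
+
+```rust
+use std::io::{self, Write};
+
+const WOP_IS_VALID_PATH: u64 = 1;
+
+fn write_u64(w: &mut impl Write, n: u64) -> io::Result<()> {
+    w.write_all(&n.to_le_bytes())
+}
+
+// Strings are length-prefixed and zero-padded to an 8-byte boundary.
+fn write_string(w: &mut impl Write, s: &str) -> io::Result<()> {
+    write_u64(w, s.len() as u64)?;
+    w.write_all(s.as_bytes())?;
+    w.write_all(&[0u8; 8][..(8 - s.len() % 8) % 8])
+}
+
+// Encode the request. Afterwards the client drains STDERR_* messages
+// (see ./logging.md) until STDERR_LAST, then reads isValid :: Bool as a
+// single integer where non-zero means true.
+fn write_is_valid_path_request(w: &mut impl Write, store_path: &str) -> io::Result<()> {
+    write_u64(w, WOP_IS_VALID_PATH)?; // operation id 1
+    write_string(w, store_path)       // path :: StorePath
+}
+```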
+
+
+## HasSubstitutes
+
+**Id:** 3<br>
+**Introduced:** Nix 0.11<br>
+
+Checks if we can substitute the input path from a substituter. Uses
+[QuerySubstitutablePaths](#querysubstitutablepaths) under the hood :/
+
+### Inputs
+path :: [StorePath][se-StorePath]
+
+### Outputs
+hasSubstitutes :: [Bool][se-Bool]
+
+
+## QueryPathHash
+
+**Id:** 4<br>
+**Introduced:** Nix 0.11<br>
+**Obsolete:** Protocol 1.16, Nix 2.0<br>
+
+Retrieves the base16 NAR hash of a given store path.
+
+### Inputs
+path :: [StorePath][se-StorePath]
+
+### Outputs
+hash :: [String][se-String] (base16-encoded NAR hash without algorithm prefix)
+
+
+## QueryReferences
+
+**Id:** 5<br>
+**Introduced:** Nix 0.11<br>
+**Obsolete:** Protocol 1.16, Nix 2.0<br>
+
+Retrieves the references of a given path.
+
+### Inputs
+path :: [StorePath][se-StorePath]
+
+### Outputs
+references :: [List][se-List] of [StorePath][se-StorePath]
+
+
+## QueryReferrers
+
+**Id:** 6<br>
+**Introduced:** Nix 0.11<br>
+
+Retrieves the referrers of a given path.
+
+### Inputs
+path :: [StorePath][se-StorePath]
+
+### Outputs
+referrers :: [List][se-List] of [StorePath][se-StorePath]
+
+
+## AddToStore
+
+**Id:** 7<br>
+**Introduced:** Nix 0.11<br>
+
+Add a new path to the store.
+
+### Before protocol version 1.25
+#### Inputs
+- baseName :: [String][se-String]
+- fixed :: [Bool64][se-Bool64]
+- recursive :: [FileIngestionMethod][se-FileIngestionMethod]
+- hashAlgo :: [String][se-String]
+- NAR dump
+
+If fixed is `false`, hashAlgo is forced to `sha256` and recursive is forced
+to `Recursive`.
+
+Only `Flat` and `Recursive` values are supported for the recursive input
+parameter.
+
+#### Outputs
+path :: [StorePath][se-StorePath]
+
+### Protocol version 1.25 or newer
+#### Inputs
+- name :: [String][se-String]
+- camStr :: [ContentAddressMethodWithAlgo][se-ContentAddressMethodWithAlgo]
+- refs :: [List][se-List] of [StorePath][se-StorePath]
+- repairBool :: [Bool64][se-Bool64]
+- [Framed][se-Framed] NAR dump
+
+#### Outputs
+info :: [ValidPathInfo][se-ValidPathInfo]
+
+
+## AddTextToStore
+
+**Id:** 8<br>
+**Introduced:** Nix 0.11<br>
+**Obsolete:** Protocol 1.25, Nix 2.4
+
+Add a text file as a store path.
+
+This was obsoleted by adding its functionality to [AddToStore](#addtostore).
+It corresponds to calling [AddToStore](#addtostore) with `camStr` set to
+`text:sha256` and `text` wrapped as a NAR.
+
+### Inputs
+- suffix :: [String][se-String]
+- text :: [Bytes][se-Bytes]
+- refs :: [List][se-List] of [StorePath][se-StorePath]
+
+### Outputs
+path :: [StorePath][se-StorePath]
+
+
+## BuildPaths
+
+**Id:** 9<br>
+**Introduced:** Nix 0.11<br>
+
+Build (or substitute) a list of derivations.
+
+### Inputs
+paths :: [List][se-List] of [DerivedPath][se-DerivedPath]
+
+#### Protocol 1.15 or newer
+mode :: [BuildMode][se-BuildMode]
+
+The server checks that the connection is trusted before allowing Repair mode.
+
+### Outputs
+1 :: [Int][se-Int] (hardcoded)
+
+
+## EnsurePath
+
+**Id:** 10<br>
+**Introduced:** Nix 0.11<br>
+
+Checks if a path is valid. Note: the path may be made valid by substituting
+it.
+
+### Inputs
+path :: [StorePath][se-StorePath]
+
+### Outputs
+1 :: [Int][se-Int] (hardcoded)
+
+
+## AddTempRoot
+
+**Id:** 11<br>
+**Introduced:** Nix 0.11<br>
+
+Creates a temporary GC root for the given store path.
+
+Temporary GC roots are valid only for the life of the connection and are used
+primarily to prevent the GC from pulling the rug out from under the client and
+deleting store paths that the client is actively doing something with.
+
+### Inputs
+path :: [StorePath][se-StorePath]
+
+### Outputs
+1 :: [Int][se-Int] (hardcoded)
+
+
+## AddIndirectRoot
+
+**Id:** 12<br>
+**Introduced:** Nix 0.11<br>
+
+Add an indirect root, which is a weak reference to the user-facing symlink
+created by [AddPermRoot](#addpermroot).
+
+Only ever sent to a nix daemon over the local Unix domain socket.
+
+### Inputs
+path :: [String][se-String]
+
+### Outputs
+1 :: [Int][se-Int] (hardcoded)
+
+
+## SyncWithGC
+
+**Id:** 13<br>
+**Introduced:** Nix 0.11<br>
+**Obsolete:** Protocol 1.32, Nix 2.5.0
+
+Acquire the global GC lock, then immediately release it.  This function must be
+called after registering a new permanent root, but before exiting.  Otherwise,
+it is possible that a running garbage collector doesn't see the new root and
+deletes the stuff we've just built.  By acquiring the lock briefly, we ensure
+that either:
+
+- The collector is already running, and so we block until the
+    collector is finished.  The collector will know about our
+    *temporary* locks, which should include whatever it is we
+    want to register as a permanent lock.
+- The collector isn't running, or it's just started but hasn't
+    acquired the GC lock yet.  In that case we get and release
+    the lock right away, then exit.  The collector scans the
+    permanent root and sees ours.
+
+In either case the permanent root is seen by the collector.
+
+Was made obsolete by using [AddTempRoot](#addtemproot) to accomplish the same
+thing.
+
+
+## FindRoots
+
+**Id:** 14<br>
+**Introduced:** Nix 0.11<br>
+
+Find the GC roots.
+
+### Outputs
+roots :: [Map][se-Map] of [String][se-String] to [StorePath][se-StorePath]
+
+The key is the link pointing to the given store path.
+
+
+## Old CollectGarbage (removed)
+
+**Id:** 15<br>
+**Introduced:** Nix 0.11<br>
+**Removed:** Protocol 1.02, Nix 0.12<br>
+
+
+## ExportPath
+
+**Id:** 16<br>
+**Introduced:** Nix 0.11<br>
+**Obsolete:** Protocol 1.17, Nix 2.0<br>
+
+Export a store path in the binary format `nix-store --import` expects. See
+the [implementation](https://github.com/NixOS/nix/blob/db3bf180a569cb20db42c5e4669d2277be6f46b6/src/libstore/export-import.cc#L29)
+for more details.
+
+### Inputs
+- path :: [StorePath][se-StorePath]
+- sign :: [Int][se-Int] (ignored and hardcoded to 0 in client)
+
+### Outputs
+Uses `STDERR_WRITE` to send the dump in export format.
+
+After the dump it outputs:
+
+1 :: [Int][se-Int] (hardcoded)
+
+
+## ImportPath (removed)
+
+**Id:** 17<br>
+**Introduced:** Nix 0.11<br>
+**Removed:** Protocol 1.09, Nix 1.0<br>
+
+
+## QueryDeriver
+
+**Id:** 18<br>
+**Introduced:** Nix 0.11<br>
+**Obsolete:** Protocol 1.16, Nix 2.0<br>
+
+Returns the store path of the derivation for a given store path.
+
+### Inputs
+path :: [StorePath][se-StorePath]
+
+### Outputs
+deriver :: [OptStorePath][se-OptStorePath]
+
+
+## SetOptions
+
+**Id:** 19<br>
+**Introduced:** Nix 0.11<br>
+
+Sends client options to the remote side.
+
+Only ever used right after the handshake.
+
+### Inputs
+
+- keepFailed :: [Int][se-Int]
+- keepGoing :: [Int][se-Int]
+- tryFallback :: [Int][se-Int]
+- verbosity :: [Verbosity][se-Verbosity]
+- maxBuildJobs :: [Int][se-Int]
+- maxSilentTime :: [Int][se-Int]
+- useBuildHook :: [Bool][se-Bool] (ignored and hardcoded to true in client)
+- verboseBuild :: [Verbosity][se-Verbosity]
+- logType :: [Int][se-Int] (ignored and hardcoded to 0 in client)
+- printBuildTrace :: [Int][se-Int] (ignored and hardcoded to 0 in client)
+- buildCores :: [Int][se-Int]
+- useSubstitutes :: [Int][se-Int]
+
+### Protocol 1.12 or newer
+otherSettings :: [Map][se-Map] of [String][se-String] to [String][se-String]
+
+
+## CollectGarbage
+
+**Id:** 20<br>
+**Introduced:** Protocol 1.02, Nix 0.12<br>
+
+Performs a garbage collection, according to the specified action.
+
+### Inputs
+- action :: [GCAction][se-GCAction]
+- pathsToDelete :: [List][se-List] of [StorePath][se-StorePath]
+- ignoreLiveness :: [Bool64][se-Bool64]
+- maxFreed :: [UInt64][se-UInt64]
+- removed :: [Int][se-Int] (ignored and hardcoded to 0 in client)
+- removed :: [Int][se-Int] (ignored and hardcoded to 0 in client)
+- removed :: [Int][se-Int] (ignored and hardcoded to 0 in client)
+
+### Outputs
+- pathsDeleted :: [List][se-List] of [String][se-String]
+- bytesFreed :: [UInt64][se-UInt64]
+- 0 :: [UInt64][se-UInt64] (hardcoded)
+
+Depending on the action, pathsDeleted is the list of GC roots, or the paths
+that would be (or have been) deleted.
+
+
+## QuerySubstitutablePathInfo
+
+**Id:** 21<br>
+**Introduced:** Protocol 1.02, Nix 0.12<br>
+
+Retrieves the substitutable path info for a given path.
+
+### Inputs
+path :: [StorePath][se-StorePath]
+
+### Outputs
+found :: [Bool][se-Bool]
+
+#### If found is true
+- deriver :: [OptStorePath][se-OptStorePath]
+- references :: [List][se-List] of [StorePath][se-StorePath]
+- downloadSize :: [UInt64][se-UInt64]
+- narSize :: [UInt64][se-UInt64]
+
+
+## QueryDerivationOutputs
+
+**Id:** 22<br>
+**Introduced:** Protocol 1.05, Nix 1.0<br>
+**Obsolete:** Protocol 1.22*, Nix 2.4<br>
+
+Retrieves all the output paths of a given derivation.
+
+### Inputs
+path :: [StorePath][se-StorePath] (must point to a derivation)
+
+### Outputs
+derivationOutputs :: [List][se-List] of [StorePath][se-StorePath]
+
+
+## QueryAllValidPaths
+
+**Id:** 23<br>
+**Introduced:** Protocol 1.05, Nix 1.0<br>
+
+Retrieves all the valid paths contained in the store.
+
+### Outputs
+paths :: [List][se-List] of [StorePath][se-StorePath]
+
+
+## QueryFailedPaths (removed)
+
+**Id:** 24<br>
+**Introduced:** Protocol 1.05, Nix 1.0<br>
+**Removed:** Protocol 1.16, Nix 2.0<br>
+
+Failed build caching API only ever used by Hydra.
+
+
+## ClearFailedPaths (removed)
+
+**Id:** 25<br>
+**Introduced:** Protocol 1.05, Nix 1.0<br>
+**Removed:** Protocol 1.16, Nix 2.0<br>
+
+Failed build caching API only ever used by Hydra.
+
+
+## QueryPathInfo
+
+**Id:** 26<br>
+**Introduced:** Protocol 1.06, Nix 1.0<br>
+
+Retrieves the pathInfo for a given path.
+
+### Inputs
+path :: [StorePath][se-StorePath]
+
+### Outputs
+
+#### If protocol version is 1.17 or newer
+success :: [Bool64][se-Bool64]
+
+##### If success is true
+pathInfo :: [UnkeyedValidPathInfo][se-UnkeyedValidPathInfo]
+
+#### If protocol version is older than 1.17
+If the info is not found, an error is raised with `STDERR_ERROR`.
+
+pathInfo :: [UnkeyedValidPathInfo][se-UnkeyedValidPathInfo]
+
+
+## ImportPaths
+
+**Id:** 27<br>
+**Introduced:** Protocol 1.09, Nix 1.0<br>
+**Obsolete:** Protocol 1.17, Nix 2.0<br>
+
+Older way of adding a store path to the remote store.
+
+It was obsoleted and replaced by [AddToStoreNar](#addtostorenar) because it
+sends the NAR before the metadata about the store path, so the receiver would
+typically have to keep the NAR in memory or temporarily on disk before
+processing it.
+
+### Inputs
+List of NAR dumps, as produced by the [ExportPath](#exportpath) operation.
+
+### Outputs
+importedPaths :: [List][se-List] of [StorePath][se-StorePath]
+
+
+## QueryDerivationOutputNames
+
+**Id:** 28<br>
+**Introduced:** Protocol 1.08, Nix 1.0<br>
+**Obsolete:** Protocol 1.21, Nix 2.4<br>
+
+Retrieves the names of the outputs of a given derivation, e.g. `out`, `dev`,
+etc.
+
+### Inputs
+path :: [StorePath][se-StorePath] (must be a derivation path)
+
+### Outputs
+names :: [List][se-List] of [String][se-String]
+
+
+## QueryPathFromHashPart
+
+**Id:** 29<br>
+**Introduced:** Protocol 1.11, Nix 1.1<br>
+
+Retrieves a store path from a base-16 hash part. Returns `""` if no path was
+found.
+
+### Inputs
+hashPart :: [String][se-String]  (must be a base-16 hash)
+
+### Outputs
+path :: [OptStorePath][se-OptStorePath]
+
+
+## QuerySubstitutablePathInfos
+
+**Id:** 30<br>
+**Introduced:** Protocol 1.12*, Nix 1.2<br>
+
+Retrieves the substitutable path infos for a set of store paths.
+
+### Inputs
+#### If protocol version is 1.22 or newer
+paths :: [Map][se-Map] of [StorePath][se-StorePath] to [OptContentAddress][se-OptContentAddress] 
+
+#### If protocol version older than 1.22
+paths :: [List][se-List] of [StorePath][se-StorePath]
+
+### Outputs
+infos :: [List][se-List] of [SubstitutablePathInfo][se-SubstitutablePathInfo]
+
+
+## QueryValidPaths
+
+**Id:** 31<br>
+**Introduced:** Protocol 1.12, Nix 1.2<br>
+
+Takes a list of store paths and returns a new list containing only the valid
+store paths.
+
+### Inputs
+paths :: [List][se-List] of [StorePath][se-StorePath]
+
+#### If protocol version is 1.27 or newer
+substitute :: [Bool][se-Bool] (defaults to false if not sent)
+
+### Outputs
+paths :: [List][se-List] of [StorePath][se-StorePath]
+
+
+## QuerySubstitutablePaths
+
+**Id:** 32<br>
+**Introduced:** Protocol 1.12, Nix 1.2<br>
+
+Takes a set of store paths and returns the subset of those paths that can be
+substituted.
+
+In versions of the protocol prior to 1.12, [HasSubstitutes](#hassubstitutes)
+was used to implement the functionality that this operation provides.
+
+### Inputs
+paths :: [List][se-List] of [StorePath][se-StorePath]
+
+### Outputs
+paths :: [List][se-List] of [StorePath][se-StorePath]
+
+
+## QueryValidDerivers
+
+**Id:** 33<br>
+**Introduced:** Protocol 1.13*, Nix 1.3<br>
+
+Retrieves the derivers of a given path.
+
+### Inputs
+path :: [StorePath][se-StorePath]
+
+### Outputs
+derivers :: [List][se-List] of [StorePath][se-StorePath]
+
+
+## OptimiseStore
+
+**Id:** 34<br>
+**Introduced:** Protocol 1.14, Nix 1.8<br>
+
+Optimise store by hardlinking files with the same content.
+
+### Outputs
+1 :: [Int][se-Int] (hardcoded)
+
+
+## VerifyStore
+
+**Id:** 35<br>
+**Introduced:** Protocol 1.14, Nix 1.9<br>
+
+Verifies the store: either only the database and the existence of paths, or
+additionally the entire contents of store paths against their NAR hashes.
+
+### Inputs
+- checkContents :: [Bool64][se-Bool64]
+- repair :: [Bool64][se-Bool64]
+
+### Outputs
+errors :: [Bool][se-Bool]
+
+
+## BuildDerivation
+
+**Id:** 36<br>
+**Introduced:** Protocol 1.14, Nix 1.10<br>
+
+Main build operation used when remote building.
+
+When acting as a remote builder this operation is used instead of
+[BuildPaths](#buildpaths), so that the client doesn't have to send the entire
+tree of derivations to the remote side before building can start. Instead, a
+reduced version of the derivation to be built is sent as part of the input,
+and only that derivation is built.
+
+The paths required by the build need to be part of the remote store
+(by copying with AddToStoreNar or substituting) before this operation is
+called.
+
+### Inputs
+- drvPath :: [StorePath][se-StorePath]
+- drv :: [BasicDerivation][se-BasicDerivation]
+- buildMode :: [BuildMode][se-BuildMode]
+
+### Outputs
+buildResult :: [BuildResult][se-BuildResult]
+
+
+## AddSignatures
+
+**Id:** 37<br>
+**Introduced:** Protocol 1.16, Nix 2.0<br>
+
+Adds the given signatures to a given store path.
+
+### Inputs
+- path :: [StorePath][se-StorePath]
+- signatures :: [List][se-List] of [String][se-String]
+
+### Outputs
+1 :: [Int][se-Int] (hardcoded)
+
+
+## NarFromPath
+
+**Id:** 38<br>
+**Introduced:** Protocol 1.17, Nix 2.0<br>
+
+Main way of getting the contents of a store path to the client.
+
+As the name suggests this is done by sending a NAR file.
+
+It replaced the now-obsolete [ExportPath](#exportpath) operation and is used
+by newer clients to implement the export functionality for the CLI. It is
+also used when remote building, to transfer build results from the remote
+builder to the client.
+
+### Inputs
+path :: [StorePath][se-StorePath]
+
+### Outputs
+NAR dumped straight to the stream.
+
+
+## AddToStoreNar
+
+**Id:** 39<br>
+**Introduced:** Protocol 1.17, Nix 2.0<br>
+
+Adds a store path to the store, given its metadata and a NAR dump.
+
+### Inputs
+- path :: [StorePath][se-StorePath]
+- deriver :: [OptStorePath][se-OptStorePath]
+- narHash :: [String][se-String] (base16-encoded SHA256 NAR hash)
+- references :: [List][se-List] of [StorePath][se-StorePath]
+- registrationTime :: [Time][se-Time]
+- narSize :: [UInt64][se-UInt64]
+- ultimate :: [Bool64][se-Bool64]
+- signatures :: [List][se-List] of [String][se-String]
+- ca :: [OptContentAddress][se-OptContentAddress]
+- repair :: [Bool64][se-Bool64]
+- dontCheckSigs :: [Bool64][se-Bool64]
+
+#### If protocol version is 1.23 or newer
+[Framed][se-Framed] NAR dump
+
+#### If protocol version is between 1.21 and 1.23
+NAR dump sent using `STDERR_READ`
+
+#### If protocol version is older than 1.21
+NAR dump sent raw on stream
+
+
+## QueryMissing
+
+**Id:** 40<br>
+**Introduced:** Protocol 1.19*, Nix 2.0<br>
+
+### Inputs
+targets :: [List][se-List] of [DerivedPath][se-DerivedPath]
+
+### Outputs
+- willBuild :: [List][se-List] of [StorePath][se-StorePath]
+- willSubstitute :: [List][se-List] of [StorePath][se-StorePath]
+- unknown :: [List][se-List] of [StorePath][se-StorePath]
+- downloadSize :: [UInt64][se-UInt64]
+- narSize :: [UInt64][se-UInt64]
+
+
+## QueryDerivationOutputMap
+
+**Id:** 41<br>
+**Introduced:** Protocol 1.22*, Nix 2.4<br>
+
+Retrieves an associative map outputName -> storePath for a given derivation.
+
+### Inputs
+path :: [StorePath][se-StorePath]  (must be a derivation path)
+
+### Outputs
+outputs :: [Map][se-Map] of [String][se-String] to [OptStorePath][se-OptStorePath]
+
+
+## RegisterDrvOutput
+
+**Id:** 42<br>
+**Introduced:** Protocol 1.27, Nix 2.4<br>
+
+Registers a derivation output.
+
+### Inputs
+#### If protocol is 1.31 or newer
+realisation :: [Realisation][se-Realisation]
+
+#### If protocol is older than 1.31
+- outputId :: [DrvOutput][se-DrvOutput]
+- outputPath :: [StorePath][se-StorePath]
+
+
+## QueryRealisation
+
+**Id:** 43<br>
+**Introduced:** Protocol 1.27, Nix 2.4<br>
+
+Retrieves the realisations attached to a given derivation output id.
+
+### Inputs
+outputId :: [DrvOutput][se-DrvOutput]
+
+### Outputs
+#### If protocol is 1.31 or newer
+realisations :: [List][se-List] of [Realisation][se-Realisation]
+
+#### If protocol is older than 1.31
+outPaths :: [List][se-List] of [BaseStorePath][se-BaseStorePath]
+
+
+## AddMultipleToStore
+
+**Id:** 44<br>
+**Introduced:** Protocol 1.32*, Nix 2.4<br>
+
+A pipelined version of [AddToStoreNar](#addtostorenar) where you can add
+multiple paths in one go.
+
+Added because the protocol doesn't support pipelining, and so on a
+high-latency connection waiting for the request/response cycle of
+[AddToStoreNar](#addtostorenar) for each small NAR was costly.
+
+### Inputs
+- repair :: [Bool64][se-Bool64]
+- dontCheckSigs :: [Bool64][se-Bool64]
+- [Framed][se-Framed] stream of add-multiple NAR dumps
+
+
+## AddBuildLog
+
+**Id:** 45<br>
+**Introduced:** Protocol 1.32, Nix 2.6.0<br>
+
+Attach some build logs to a given build.
+
+### Inputs
+- path :: [String][se-String] (might be [BaseStorePath][se-BaseStorePath])
+- [Framed][se-Framed] stream of log lines
+
+### Outputs
+1 :: [Int][se-Int] (hardcoded)
+
+
+## BuildPathsWithResults
+
+**Id:** 46<br>
+**Introduced:** Protocol 1.34*, Nix 2.8.0<br>
+
+Builds (or substitutes) a list of derivations and returns a list of results.
+
+### Inputs
+- drvs :: [List][se-List] of [DerivedPath][se-DerivedPath]
+- mode :: [BuildMode][se-BuildMode]
+
+### Outputs
+results :: [List][se-List] of [KeyedBuildResult][se-KeyedBuildResult]
+
+
+## AddPermRoot
+
+**Id:** 47<br>
+**Introduced:** Protocol 1.36*, Nix 2.20.0<br>
+
+### Inputs
+- storePath :: [StorePath][se-StorePath]
+- gcRoot :: [String][se-String]
+
+### Outputs
+gcRoot :: [String][se-String]
+
+
+
+[se-Int]: ./serialization.md#int
+[se-UInt8]: ./serialization.md#uint8
+[se-UInt64]: ./serialization.md#uint64
+[se-Bool]: ./serialization.md#bool
+[se-Bool64]: ./serialization.md#bool64
+[se-Time]: ./serialization.md#time
+[se-FileIngestionMethod]: ./serialization.md#fileingestionmethod
+[se-BuildMode]: ./serialization.md#buildmode
+[se-Verbosity]: ./serialization.md#verbosity
+[se-GCAction]: ./serialization.md#gcaction
+[se-Bytes]: ./serialization.md#bytes
+[se-String]: ./serialization.md#string
+[se-StorePath]: ./serialization.md#storepath
+[se-BaseStorePath]: ./serialization.md#basestorepath
+[se-OptStorePath]: ./serialization.md#optstorepath
+[se-ContentAddressMethodWithAlgo]: ./serialization.md#contentaddressmethodwithalgo
+[se-OptContentAddress]: ./serialization.md#optcontentaddress
+[se-DerivedPath]: ./serialization.md#derivedpath
+[se-DrvOutput]: ./serialization.md#drvoutput
+[se-Realisation]: ./serialization.md#realisation
+[se-List]: ./serialization.md#list-of-x
+[se-Map]: ./serialization.md#map-of-x-to-y
+[se-SubstitutablePathInfo]: ./serialization.md#substitutablepathinfo
+[se-ValidPathInfo]: ./serialization.md#validpathinfo
+[se-UnkeyedValidPathInfo]: ./serialization.md#unkeyedvalidpathinfo
+[se-BuildResult]: ./serialization.md#buildresult
+[se-KeyedBuildResult]: ./serialization.md#keyedbuildresult
+[se-BasicDerivation]: ./serialization.md#basicderivation
+[se-Framed]: ./serialization.md#framed
\ No newline at end of file
diff --git a/tvix/docs/src/nix-daemon/serialization.md b/tvix/docs/src/nix-daemon/serialization.md
new file mode 100644
index 0000000000..a2694a4dea
--- /dev/null
+++ b/tvix/docs/src/nix-daemon/serialization.md
@@ -0,0 +1,305 @@
+
+## Primitives
+
+### UInt64
+A 64-bit unsigned integer, encoded in little endian byte order.
+
+### Bytes
+
+- len :: [UInt64](#uint64)
+- len bytes of content
+- padded with zeros up to the next multiple of 8 bytes (64-bit alignment)
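+
+For illustration, a minimal sketch of the writer side (the padding works out
+to `(8 - len % 8) % 8` zero bytes):
+
+```rust
+use std::io::{self, Write};
+
+// Write a Bytes value: little-endian length, content, then zero padding
+// so the content occupies a multiple of 8 bytes on the wire.
+fn write_bytes(w: &mut impl Write, data: &[u8]) -> io::Result<()> {
+    w.write_all(&(data.len() as u64).to_le_bytes())?;
+    w.write_all(data)?;
+    let pad = (8 - data.len() % 8) % 8;
+    w.write_all(&[0u8; 8][..pad])
+}
+```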
+
+
+## Int serializers
+
+### Int
+[UInt64](#uint64) cast to C `unsigned int` with upper bounds checking.
+
+### Int64
+[UInt64](#uint64) cast to C `int64_t` with upper bounds checking.
+
+### UInt8
+[UInt64](#uint64) cast to C `uint8_t` with upper bounds checking.
+
+### Size
+[UInt64](#uint64) cast to C `size_t` with upper bounds checking.
+
+### Time
+[UInt64](#uint64) cast to C `time_t` with upper bounds checking.
+
+### Bool
+Sent as an [Int](#int) where 0 is false and everything else is true.
+
+### Bool64
+Sent as a [UInt64](#uint64) where 0 is false and everything else is true.
+
+### FileIngestionMethod
+A [UInt8](#uint8) enum with the following possible values:
+
+| Name      | Int |
+| --------- | --- |
+| Flat      |  0  |
+| Recursive |  1  |
+
+### BuildMode
+An [Int](#int) enum with the following possible values:
+
+| Name   | Int |
+| ------ | --- |
+| Normal |  0  |
+| Repair |  1  |
+| Check  |  2  |
+
+### Verbosity
+An [Int](#int) enum with the following possible values:
+
+| Name      | Int |
+| --------- | --- |
+| Error     |  0  |
+| Warn      |  1  |
+| Notice    |  2  |
+| Info      |  3  |
+| Talkative |  4  |
+| Chatty    |  5  |
+| Debug     |  6  |
+| Vomit     |  7  |
+
+### GCAction
+An [Int](#int) enum with the following possible values:
+
+| Name           | Int |
+| -------------- | --- |
+| ReturnLive     |  0  |
+| ReturnDead     |  1  |
+| DeleteDead     |  2  |
+| DeleteSpecific |  3  |
+
+### BuildStatus
+An [Int](#int) enum with the following possible values:
+
+| Name                   | Int |
+| ---------------------- | --- |
+| Built                  |  0  |
+| Substituted            |  1  |
+| AlreadyValid           |  2  |
+| PermanentFailure       |  3  |
+| InputRejected          |  4  |
+| OutputRejected         |  5  |
+| TransientFailure       |  6  |
+| CachedFailure          |  7  |
+| TimedOut               |  8  |
+| MiscFailure            |  9  |
+| DependencyFailed       | 10  |
+| LogLimitExceeded       | 11  |
+| NotDeterministic       | 12  |
+| ResolvesToAlreadyValid | 13  |
+| NoSubstituters         | 14  |
+
+### ActivityType
+An [Int](#int) enum with the following possible values:
+
+| Name          | Int |
+| ------------- | --- |
+| Unknown       |   0 |
+| CopyPath      | 100 |
+| FileTransfer  | 101 |
+| Realise       | 102 |
+| CopyPaths     | 103 |
+| Builds        | 104 |
+| Build         | 105 |
+| OptimiseStore | 106 |
+| VerifyPaths   | 107 |
+| Substitute    | 108 |
+| QueryPathInfo | 109 |
+| PostBuildHook | 110 |
+| BuildWaiting  | 111 |
+| FetchTree     | 112 |
+
+### ResultType
+An [Int](#int) enum with the following possible values:
+
+| Name             | Int |
+| ---------------- | --- |
+| FileLinked       | 100 |
+| BuildLogLine     | 101 |
+| UntrustedPath    | 102 |
+| CorruptedPath    | 103 |
+| SetPhase         | 104 |
+| Progress         | 105 |
+| SetExpected      | 106 |
+| PostBuildLogLine | 107 |
+| FetchStatus      | 108 |
+
+### FieldType
+An [Int](#int) enum with the following possible values:
+
+| Name   | Int |
+| ------ | --- |
+| Int    |  0  |
+| String |  1  |
+
+
+## Bytes serializers
+
+### String
+Simply a [Bytes](#bytes) that sometimes carries UTF-8 string-like semantics.
+
+### StorePath
+String representation of a full store path.
+
+### BaseStorePath
+String representation of the basename of a store path, i.e. the store path
+without the `/nix/store/` prefix.
+
+### OptStorePath
+Optional store path.
+
+If there is no store path this is serialized as the empty string; otherwise
+it is the same as [StorePath](#storepath).
+
+### ContentAddressMethodWithAlgo
+One of the following strings:
+- text:`hash algorithm` (e.g. `text:sha256`)
+- fixed:r:`hash algorithm` (recursive ingestion, e.g. `fixed:r:sha256`)
+- fixed:`hash algorithm` (flat ingestion, e.g. `fixed:sha256`)
+
+### DerivedPath
+A [String](#string) parsed according to the protocol version:
+
+#### If protocol is 1.30 or newer
+Parsed as C++ Nix does in `DerivedPath::parseLegacy(store, s)`.
+
+#### If protocol is older than 1.30
+Parsed as C++ Nix does in `parsePathWithOutputs(store, s).toDerivedPath()`.
+
+### ContentAddress
+String with the format:
+- [ContentAddressMethodWithAlgo](#contentaddressmethodwithalgo):`hash`
+
+### OptContentAddress
+Optional version of [ContentAddress](#contentaddress) where empty string means
+no content address.
+
+### DrvOutput
+String with format:
+- `hash with any prefix`!`output name`
+
+### Realisation
+A JSON object sent as a string.
+
+The JSON object has the following keys:
+| Key                   | Value                   |
+| --------------------- | ----------------------- |
+| id                    | [DrvOutput](#drvoutput) |
+| outPath               | [StorePath](#storepath) |
+| signatures            | Array of String         |
+| dependentRealisations | Object where key is [DrvOutput](#drvoutput) and value is [StorePath](#storepath) |
+
+
+## Complex serializers
+
+### List of x
+A list is encoded as a [Size](#size) length n, followed by n encodings of x.
+
+### Map of x to y
+A map is encoded as a [Size](#size) length n, followed by n encodings of
+pairs of x and y.
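+
+As a hedged sketch (hypothetical helpers, not the tvix API), both encoders
+boil down to a length prefix followed by the element encodings:
+
+```rust
+use std::io::{self, Write};
+
+// Encode a list: Size (n), then n encodings of x.
+fn write_list<W: Write, T>(
+    w: &mut W,
+    items: &[T],
+    mut write_item: impl FnMut(&mut W, &T) -> io::Result<()>,
+) -> io::Result<()> {
+    w.write_all(&(items.len() as u64).to_le_bytes())?;
+    for item in items {
+        write_item(&mut *w, item)?;
+    }
+    Ok(())
+}
+
+// Encode a map: Size (n), then n encodings of key/value pairs.
+fn write_map<W: Write, K, V>(
+    w: &mut W,
+    entries: &[(K, V)],
+    mut write_entry: impl FnMut(&mut W, &K, &V) -> io::Result<()>,
+) -> io::Result<()> {
+    w.write_all(&(entries.len() as u64).to_le_bytes())?;
+    for (k, v) in entries {
+        write_entry(&mut *w, k, v)?;
+    }
+    Ok(())
+}
+```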
+
+
+### BuildResult
+- status :: [BuildStatus](#buildstatus)
+- errorMsg :: [String](#string)
+
+#### Protocol 1.29 or newer
+- timesBuilt :: [Int](#int)
+- isNonDeterministic :: [Bool64](#bool64)
+- startTime :: [Time](#time)
+- stopTime :: [Time](#time)
+
+#### Protocol 1.37 or newer
+- cpuUser :: [OptMicroseconds](#optmicroseconds)
+- cpuSystem :: [OptMicroseconds](#optmicroseconds)
+
+#### Protocol 1.28 or newer
+builtOutputs :: [Map](#map-of-x-to-y) of [DrvOutput](#drvoutput) to [Realisation](#realisation)
+
+### KeyedBuildResult
+- path :: [DerivedPath](#derivedpath)
+- result :: [BuildResult](#buildresult)
+
+### OptMicroseconds
+Optional microseconds.
+
+- tag :: [UInt8](#uint8)
+
+#### If tag is 1
+- seconds :: [Int64](#int64)
+
+
+### SubstitutablePathInfo
+- storePath :: [StorePath](#storepath)
+- deriver :: [OptStorePath](#optstorepath)
+- references :: [List](#list-of-x) of [StorePath](#storepath)
+- downloadSize :: [UInt64](#uint64)
+- narSize :: [UInt64](#uint64)
+
+
+### UnkeyedValidPathInfo
+- deriver :: [OptStorePath](#optstorepath)
+- narHash :: [String](#string) (base16-encoded SHA256 NAR hash)
+- references :: [List](#list-of-x) of [StorePath](#storepath)
+- registrationTime :: [Time](#time)
+- narSize :: [UInt64](#uint64)
+
+#### If protocol version is 1.16 or above
+- ultimate :: [Bool64](#bool64)
+- signatures :: [List](#list-of-x) of [String](#string)
+- ca :: [OptContentAddress](#optcontentaddress)
+
+
+### ValidPathInfo
+- path :: [StorePath](#storepath)
+- info :: [UnkeyedValidPathInfo](#unkeyedvalidpathinfo)
+
+### DerivationOutput
+- path :: [String](#string)
+- hashAlgo :: [String](#string)
+- hash :: [String](#string)
+
+### BasicDerivation
+- outputs :: [Map](#map-of-x-to-y) of [String](#string) to [DerivationOutput](#derivationoutput)
+- inputSrcs :: [List](#list-of-x) of [StorePath](#storepath)
+- platform :: [String](#string)
+- builder :: [String](#string)
+- args :: [List](#list-of-x) of [String](#string)
+- env :: [Map](#map-of-x-to-y) of [String](#string) to [String](#string)
+
+### TraceLine
+- havePos :: [Size](#size) (hardcoded to 0)
+- hint :: [String](#string)
+
+### Error
+- type :: [String](#string) (hardcoded to `Error`)
+- level :: [Verbosity](#verbosity)
+- name :: [String](#string) (removed and hardcoded to `Error`)
+- msg :: [String](#string)
+- havePos :: [Size](#size) (hardcoded to 0)
+- traces :: [List](#list-of-x) of [TraceLine](#traceline)
+
+### Field
+- type :: [FieldType](#fieldtype)
+
+#### If type is Int
+- value :: [UInt64](#uint64)
+
+#### If type is String
+- value :: [String](#string)
+
+
+## Framed
+
+In protocol 1.23, [AddToStoreNar](./operations.md#addtostorenar) introduced
+framed streaming for sending the NAR dump, and later versions of the protocol
+also use this framing for other operations.
+
+At its core, framed streaming is just a series of [Bytes](#bytes) of varying
+length, terminated by an empty [Bytes](#bytes).
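+
+A minimal reader sketch (buffering for simplicity; a real implementation
+would stream the chunks rather than collect them):
+
+```rust
+use std::io::{self, Read};
+
+fn read_u64(r: &mut impl Read) -> io::Result<u64> {
+    let mut buf = [0u8; 8];
+    r.read_exact(&mut buf)?;
+    Ok(u64::from_le_bytes(buf))
+}
+
+// Read one Bytes chunk: length, content, zero padding to 8 bytes.
+fn read_bytes(r: &mut impl Read) -> io::Result<Vec<u8>> {
+    let len = read_u64(r)? as usize;
+    let mut buf = vec![0u8; len];
+    r.read_exact(&mut buf)?;
+    let mut pad = [0u8; 8];
+    r.read_exact(&mut pad[..(8 - len % 8) % 8])?;
+    Ok(buf)
+}
+
+// Collect a complete framed payload: chunks until an empty one.
+fn read_framed(r: &mut impl Read) -> io::Result<Vec<u8>> {
+    let mut out = Vec::new();
+    loop {
+        let chunk = read_bytes(r)?;
+        if chunk.is_empty() {
+            return Ok(out);
+        }
+        out.extend_from_slice(&chunk);
+    }
+}
+```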
+
+This method of sending data has the advantage of not having to parse the data
+to find where it ends. Older versions of the protocol would potentially parse
+the NAR twice.
\ No newline at end of file
diff --git a/tvix/store/docs/api.md b/tvix/docs/src/store/api.md
index c1dacc89a5..21c23ab6ad 100644
--- a/tvix/store/docs/api.md
+++ b/tvix/docs/src/store/api.md
@@ -5,7 +5,7 @@ This document outlines the design of the API exposed by tvix-castore and tvix-
 store, as well as other implementations of this store protocol.
 
 This document is meant to be read side-by-side with
-[castore.md](../../tvix-castore/docs/castore.md) which describes the data model
+[Data Model](../castore/data-model.md) which describes the data model
 in more detail.
 
 The store API has four main consumers:
@@ -205,7 +205,7 @@ and potentially a chain of `Directory` objects requested from
 When the desired file is reached, the *BlobService* can be used to read the
 contents of this file, and return it back to the evaluator.
 
-FUTUREWORK: define how importing from symlinks should/does work.
+FUTUREWORK: Define how importing from symlinks should/does work.
 
 Contrary to Nix, this has the advantage of not having to copy all of the
 contents of a store path to the evaluating machine, but really only fetching
@@ -218,7 +218,7 @@ This is useful for people running a Tvix-only system, or running builds on a
 In a system with Nix installed, we can't simply manually "extract" things to
 `/nix/store`, as Nix assumes to own all writes to this location.
 In these use cases, we're probably better off exposing a tvix-store as a local
-binary cache (that's what `//tvix/nar-bridge` does).
+binary cache (that's what `//tvix/nar-bridge-go` does).
 
 Assuming we are in an environment where we control `/nix/store` exclusively, a
 "realize to disk" would either "extract" things from the `tvix-store` to a
diff --git a/tvix/docs/src/value-pointer-equality.md b/tvix/docs/src/value-pointer-equality.md
index d84efcb50c..a4539513ef 100644
--- a/tvix/docs/src/value-pointer-equality.md
+++ b/tvix/docs/src/value-pointer-equality.md
@@ -47,8 +47,10 @@ works in C++ Nix, the only production ready Nix implementation currently availab
 
 ## Nix (Pointer) Equality in C++ Nix
 
-TIP: The summary presented here is up-to-date as of 2023-06-27 and was tested
-with Nix 2.3, 2.11 and 2.15.
+```admonish info
+The summary presented here is up-to-date as of 2023-06-27 and was tested with
+Nix 2.3, 2.11 and 2.15.
+```
 
 ### `EvalState::eqValues` and `ExprOpEq::eval`
 
diff --git a/tvix/eval/Cargo.toml b/tvix/eval/Cargo.toml
index 677ce6ab85..4cf8ea146c 100644
--- a/tvix/eval/Cargo.toml
+++ b/tvix/eval/Cargo.toml
@@ -30,7 +30,6 @@ smol_str = "0.2.0"
 tabwriter = "1.2"
 test-strategy = { version = "0.2.1", optional = true }
 toml = "0.6.0"
-xml-rs = "0.8.4"
 sha2 = "0.10.8"
 sha1 = "0.10.6"
 md-5 = "0.10.6"
@@ -43,6 +42,9 @@ pretty_assertions = "1.2.1"
 rstest = "0.19.0"
 tempfile = "3.3.0"
 
+[target.'cfg(not(target_env = "msvc"))'.dev-dependencies]
+tikv-jemallocator = "0.5"
+
 [features]
 default = ["impure", "arbitrary", "nix_tests"]
 
diff --git a/tvix/eval/benches/eval.rs b/tvix/eval/benches/eval.rs
index 57d4eb71b5..1333f5018c 100644
--- a/tvix/eval/benches/eval.rs
+++ b/tvix/eval/benches/eval.rs
@@ -1,5 +1,11 @@
 use criterion::{black_box, criterion_group, criterion_main, Criterion};
 use itertools::Itertools;
+#[cfg(not(target_env = "msvc"))]
+use tikv_jemallocator::Jemalloc;
+
+#[cfg(not(target_env = "msvc"))]
+#[global_allocator]
+static GLOBAL: Jemalloc = Jemalloc;
 
 fn interpret(code: &str) {
     tvix_eval::Evaluation::new_pure().evaluate(code, None);
diff --git a/tvix/eval/build.rs b/tvix/eval/build.rs
index a9c9a78b06..b37a6e8a0c 100644
--- a/tvix/eval/build.rs
+++ b/tvix/eval/build.rs
@@ -5,5 +5,10 @@ fn main() {
         "cargo:rustc-env=TVIX_CURRENT_SYSTEM={}",
         &env::var("TARGET").unwrap()
     );
-    println!("cargo:rerun-if-changed-env=TARGET")
+    println!("cargo:rerun-if-changed-env=TARGET");
+
+    // Pick up new test case files
+    // https://github.com/la10736/rstest/issues/256
+    println!("cargo:rerun-if-changed=src/tests/nix_tests");
+    println!("cargo:rerun-if-changed=src/tests/tvix_tests")
 }
diff --git a/tvix/eval/default.nix b/tvix/eval/default.nix
index 91661291f7..9dd5875f85 100644
--- a/tvix/eval/default.nix
+++ b/tvix/eval/default.nix
@@ -1,9 +1,16 @@
 # TODO: find a way to build the benchmarks via crate2nix
-{ depot, pkgs, ... }:
+{ depot, pkgs, lib, ... }:
 
-depot.tvix.crates.workspaceMembers.tvix-eval.build.override {
+(depot.tvix.crates.workspaceMembers.tvix-eval.build.override {
   runTests = true;
 
   # Make C++ Nix available, to compare eval results against.
   testInputs = [ pkgs.nix ];
-}
+}).overrideAttrs (old: rec {
+  meta.ci.targets = lib.filter (x: lib.hasPrefix "with-features" x || x == "no-features") (lib.attrNames passthru);
+  passthru = depot.tvix.utils.mkFeaturePowerset {
+    inherit (old) crateName;
+    features = [ "nix_tests" ];
+    override.testInputs = [ pkgs.nix ];
+  };
+})
diff --git a/tvix/eval/src/builtins/mod.rs b/tvix/eval/src/builtins/mod.rs
index 04a0b3dd33..96e9985747 100644
--- a/tvix/eval/src/builtins/mod.rs
+++ b/tvix/eval/src/builtins/mod.rs
@@ -274,9 +274,10 @@ mod pure_builtins {
         list: Value,
     ) -> Result<Value, ErrorKind> {
         let mut separator = separator.to_contextful_str()?;
+
         let mut context = NixContext::new();
-        if let Some(sep_context) = separator.context_mut() {
-            context = context.join(sep_context);
+        if let Some(sep_context) = separator.take_context() {
+            context.extend(sep_context.into_iter())
         }
         let list = list.to_list()?;
         let mut res = BString::default();
@@ -296,13 +297,8 @@ mod pure_builtins {
             {
                 Ok(mut s) => {
                     res.push_str(&s);
-                    if let Some(ref mut other_context) = s.context_mut() {
-                        // It is safe to consume the other context here
-                        // because the `list` and `separator` are originally
-                        // moved, here.
-                        // We are not going to use them again
-                        // because the result here is a string.
-                        context = context.join(other_context);
+                    if let Some(other_context) = s.take_context() {
+                        context.extend(other_context.into_iter());
                     }
                 }
                 Err(c) => return Ok(Value::Catchable(Box::new(c))),
@@ -469,15 +465,6 @@ mod pure_builtins {
         toml::from_str(toml_str.to_str()?).map_err(|err| err.into())
     }
 
-    #[builtin("filterSource")]
-    #[allow(non_snake_case)]
-    async fn builtin_filterSource(_co: GenCo, #[lazy] _e: Value) -> Result<Value, ErrorKind> {
-        // TODO: implement for nixpkgs compatibility
-        Ok(Value::from(CatchableErrorKind::UnimplementedFeature(
-            "filterSource".into(),
-        )))
-    }
-
     #[builtin("genericClosure")]
     async fn builtin_generic_closure(co: GenCo, input: Value) -> Result<Value, ErrorKind> {
         let attrs = input.to_attrs()?;
@@ -764,9 +751,8 @@ mod pure_builtins {
         }
 
         if let Some(origin_ctx) = origin.context_mut() {
-            // FUTUREWORK(performance): avoid this clone
-            // and extend in-place.
-            *origin_ctx = origin_ctx.clone().join(&mut ctx_elements.into());
+            origin_ctx.extend(ctx_elements)
+            // TODO: didn't we forget cases where origin had no context?
         }
 
         Ok(origin.into())
@@ -1169,8 +1155,8 @@ mod pure_builtins {
         let mut empty_string_replace = false;
         let mut context = NixContext::new();
 
-        if let Some(string_context) = string.context_mut() {
-            context = context.join(string_context);
+        if let Some(string_context) = string.take_context() {
+            context.extend(string_context.into_iter());
         }
 
         // This can't be implemented using Rust's string.replace() as
@@ -1200,8 +1186,8 @@ mod pure_builtins {
                 if string[i..i + from.len()] == *from {
                     res.push_str(&to);
                     i += from.len();
-                    if let Some(to_ctx) = to.context_mut() {
-                        context = context.join(to_ctx);
+                    if let Some(to_ctx) = to.take_context() {
+                        context.extend(to_ctx.into_iter());
                     }
 
                     // remember if we applied the empty from->to
@@ -1232,8 +1218,8 @@ mod pure_builtins {
 
             if from.is_empty() {
                 res.push_str(&to);
-                if let Some(to_ctx) = to.context_mut() {
-                    context = context.join(to_ctx);
+                if let Some(to_ctx) = to.take_context() {
+                    context.extend(to_ctx.into_iter());
                 }
                 break;
             }
@@ -1291,6 +1277,9 @@ mod pure_builtins {
                 })
                 .collect();
             ret.push_back(Value::List(NixList::from(v)));
+            if pos == text.len() {
+                break;
+            }
             pos = thematch.end();
         }
 
@@ -1504,15 +1493,19 @@ mod pure_builtins {
         }
 
         let mut buf: Vec<u8> = vec![];
-        to_xml::value_to_xml(&mut buf, &value)?;
-        Ok(String::from_utf8(buf)?.into())
-    }
-
-    #[builtin("placeholder")]
-    async fn builtin_placeholder(co: GenCo, #[lazy] _x: Value) -> Result<Value, ErrorKind> {
-        generators::emit_warning_kind(&co, WarningKind::NotImplemented("builtins.placeholder"))
-            .await;
-        Ok("<builtins.placeholder-is-not-implemented-in-tvix-yet>".into())
+        let context = to_xml::value_to_xml(&mut buf, &value)?;
+
+        Ok((
+            buf,
+            // FUTUREWORK: We have a distinction between an empty context, and
+            // no context at all. Fix this.
+            if !context.is_empty() {
+                Some(Box::new(context))
+            } else {
+                None
+            },
+        )
+            .into())
     }
 
     #[builtin("trace")]
@@ -1612,10 +1605,16 @@ pub fn pure_builtins() -> Vec<(&'static str, Value)> {
         crate::systems::llvm_triple_to_nix_double(CURRENT_PLATFORM).into(),
     ));
 
-    // TODO: implement for nixpkgs compatibility
     result.push((
         "__curPos",
-        Value::from(CatchableErrorKind::UnimplementedFeature("__curPos".into())),
+        Value::Thunk(Thunk::new_suspended_native(Box::new(move || {
+            // TODO: implement for nixpkgs compatibility
+            Ok(Value::attrs(NixAttrs::from_iter([
+                ("line", 42.into()),
+                ("column", 42.into()),
+                ("file", Value::String("/deep/thought".into())),
+            ])))
+        }))),
     ));
 
     result
@@ -1623,6 +1622,8 @@ pub fn pure_builtins() -> Vec<(&'static str, Value)> {
 
 #[builtins]
 mod placeholder_builtins {
+    use crate::NixContext;
+
     use super::*;
 
     #[builtin("unsafeDiscardStringContext")]
@@ -1671,24 +1672,17 @@ mod placeholder_builtins {
             .to_contextful_str()?;
 
         // If there's any context, we will swap any ... by a path one.
-        if let Some(ctx) = v.context_mut() {
-            let new_context: tvix_eval::NixContext = ctx
-                .iter()
-                .map(|elem| match elem {
-                    // FUTUREWORK(performance): ideally, we should either:
-                    // (a) do interior mutation of the existing context.
-                    // (b) let the structural sharing make those clones cheap.
-                    crate::NixContextElement::Derivation(drv_path) => {
-                        crate::NixContextElement::Plain(drv_path.to_string())
-                    }
-                    elem => elem.clone(),
-                })
-                .collect::<HashSet<_>>()
-                .into();
+        if let Some(c) = v.take_context() {
+            let mut context = NixContext::new();
+            context.extend(c.into_iter().map(|elem| match elem {
+                crate::NixContextElement::Derivation(drv_path) => {
+                    crate::NixContextElement::Plain(drv_path.to_string())
+                }
+                elem => elem.clone(),
+            }));
 
-            *ctx = new_context;
+            return Ok(Value::String(NixString::new_context_from(context, v)));
         }
-
         Ok(Value::from(v))
     }
 
@@ -1709,6 +1703,7 @@ mod placeholder_builtins {
         _name: Value,
         _attrset: Value,
     ) -> Result<Value, ErrorKind> {
+        // TODO: implement for nixpkgs compatibility
         generators::emit_warning_kind(
             &co,
             WarningKind::NotImplemented("builtins.unsafeGetAttrsPos"),
diff --git a/tvix/eval/src/builtins/to_xml.rs b/tvix/eval/src/builtins/to_xml.rs
index bb12cebfc9..093e127fe2 100644
--- a/tvix/eval/src/builtins/to_xml.rs
+++ b/tvix/eval/src/builtins/to_xml.rs
@@ -3,112 +3,98 @@
 //! things in nixpkgs rely on.
 
 use bstr::ByteSlice;
+use std::borrow::Cow;
 use std::{io::Write, rc::Rc};
-use xml::writer::events::XmlEvent;
-use xml::writer::EmitterConfig;
-use xml::writer::EventWriter;
 
-use crate::{ErrorKind, Value};
+use crate::{ErrorKind, NixContext, NixContextElement, Value};
 
 /// Recursively serialise a value to XML. The value *must* have been
 /// deep-forced before being passed to this function.
-pub fn value_to_xml<W: Write>(mut writer: W, value: &Value) -> Result<(), ErrorKind> {
-    let config = EmitterConfig {
-        perform_indent: true,
-        pad_self_closing: true,
-
-        // Nix uses single-quotes *only* in the document declaration,
-        // so we need to write it manually.
-        write_document_declaration: false,
-        ..Default::default()
-    };
-
+/// On success, returns the NixContext.
+pub fn value_to_xml<W: Write>(mut writer: W, value: &Value) -> Result<NixContext, ErrorKind> {
     // Write a literal document declaration, using C++-Nix-style
     // single quotes.
     writeln!(writer, "<?xml version='1.0' encoding='utf-8'?>")?;
 
-    let mut writer = EventWriter::new_with_config(writer, config);
-
-    writer.write(XmlEvent::start_element("expr"))?;
-    value_variant_to_xml(&mut writer, value)?;
-    writer.write(XmlEvent::end_element())?;
+    let mut emitter = XmlEmitter::new(writer);
 
-    // Unwrap the writer to add the final newline that C++ Nix adds.
-    writeln!(writer.into_inner())?;
+    emitter.write_open_tag("expr", &[])?;
+    value_variant_to_xml(&mut emitter, value)?;
+    emitter.write_closing_tag("expr")?;
 
-    Ok(())
+    Ok(emitter.into_context())
 }
 
 fn write_typed_value<W: Write, V: ToString>(
-    w: &mut EventWriter<W>,
-    name: &str,
+    w: &mut XmlEmitter<W>,
+    name_unescaped: &str,
     value: V,
 ) -> Result<(), ErrorKind> {
-    w.write(XmlEvent::start_element(name).attr("value", &value.to_string()))?;
-    w.write(XmlEvent::end_element())?;
+    w.write_self_closing_tag(name_unescaped, &[("value", &value.to_string())])?;
     Ok(())
 }
 
-fn value_variant_to_xml<W: Write>(w: &mut EventWriter<W>, value: &Value) -> Result<(), ErrorKind> {
+fn value_variant_to_xml<W: Write>(w: &mut XmlEmitter<W>, value: &Value) -> Result<(), ErrorKind> {
     match value {
         Value::Thunk(t) => return value_variant_to_xml(w, &t.value()),
 
         Value::Null => {
-            w.write(XmlEvent::start_element("null"))?;
-            w.write(XmlEvent::end_element())
+            w.write_open_tag("null", &[])?;
+            w.write_closing_tag("null")?;
         }
 
         Value::Bool(b) => return write_typed_value(w, "bool", b),
         Value::Integer(i) => return write_typed_value(w, "int", i),
         Value::Float(f) => return write_typed_value(w, "float", f),
-        Value::String(s) => return write_typed_value(w, "string", s.to_str()?),
+        Value::String(s) => {
+            if let Some(context) = s.context() {
+                w.extend_context(context.iter().cloned());
+            }
+            return write_typed_value(w, "string", s.to_str()?);
+        }
         Value::Path(p) => return write_typed_value(w, "path", p.to_string_lossy()),
 
         Value::List(list) => {
-            w.write(XmlEvent::start_element("list"))?;
+            w.write_open_tag("list", &[])?;
 
             for elem in list.into_iter() {
                 value_variant_to_xml(w, elem)?;
             }
 
-            w.write(XmlEvent::end_element())
+            w.write_closing_tag("list")?;
         }
 
         Value::Attrs(attrs) => {
-            w.write(XmlEvent::start_element("attrs"))?;
+            w.write_open_tag("attrs", &[])?;
 
             for elem in attrs.iter() {
-                w.write(XmlEvent::start_element("attr").attr("name", &elem.0.to_str_lossy()))?;
+                w.write_open_tag("attr", &[("name", &elem.0.to_str_lossy())])?;
                 value_variant_to_xml(w, elem.1)?;
-                w.write(XmlEvent::end_element())?;
+                w.write_closing_tag("attr")?;
             }
 
-            w.write(XmlEvent::end_element())
+            w.write_closing_tag("attrs")?;
         }
 
         Value::Closure(c) => {
-            w.write(XmlEvent::start_element("function"))?;
+            w.write_open_tag("function", &[])?;
 
             match &c.lambda.formals {
                 Some(formals) => {
-                    let mut attrspat = XmlEvent::start_element("attrspat");
+                    let mut attrs: Vec<(&str, &str)> = Vec::with_capacity(2);
                     if formals.ellipsis {
-                        attrspat = attrspat.attr("ellipsis", "1");
+                        attrs.push(("ellipsis", "1"));
                     }
                     if let Some(ref name) = &formals.name {
-                        attrspat = attrspat.attr("name", name.as_str());
+                        attrs.push(("name", name.as_str()));
                     }
 
-                    w.write(attrspat)?;
-
+                    w.write_open_tag("attrspat", &attrs)?;
                     for arg in formals.arguments.iter() {
-                        w.write(
-                            XmlEvent::start_element("attr").attr("name", &arg.0.to_str_lossy()),
-                        )?;
-                        w.write(XmlEvent::end_element())?;
+                        w.write_self_closing_tag("attr", &[("name", &arg.0.to_str_lossy())])?;
                     }
 
-                    w.write(XmlEvent::end_element())?;
+                    w.write_closing_tag("attrspat")?;
                 }
                 None => {
                     // TODO(tazjin): tvix does not currently persist function
@@ -120,17 +106,16 @@ fn value_variant_to_xml<W: Write>(w: &mut EventWriter<W>, value: &Value) -> Resu
                     // If we don't want to persist the data, we can re-parse the
                     // AST from the spans of the lambda's bytecode and figure it
                     // out that way, but it needs some investigating.
-                    w.write(XmlEvent::start_element("varpat").attr("name", /* fake: */ "x"))?;
-                    w.write(XmlEvent::end_element())?;
+                    w.write_self_closing_tag("varpat", &[("name", /* fake: */ "x")])?;
                 }
             }
 
-            w.write(XmlEvent::end_element())
+            w.write_closing_tag("function")?;
         }
 
         Value::Builtin(_) => {
-            w.write(XmlEvent::start_element("unevaluated"))?;
-            w.write(XmlEvent::end_element())
+            w.write_open_tag("unevaluated", &[])?;
+            w.write_closing_tag("unevaluated")?;
         }
 
         Value::AttrNotFound
@@ -148,7 +133,189 @@ fn value_variant_to_xml<W: Write>(w: &mut EventWriter<W>, value: &Value) -> Resu
         Value::Catchable(_) => {
             panic!("tvix bug: value_to_xml() called on a value which had not been deep-forced")
         }
-    }?;
+    };
 
     Ok(())
 }
+
+/// A simple-stupid XML emitter, which implements only the subset needed for byte-by-byte compat with C++ nix’ `builtins.toXML`.
+struct XmlEmitter<W> {
+    /// The current indentation
+    cur_indent: usize,
+    writer: W,
+    context: NixContext,
+}
+
+impl<W: Write> XmlEmitter<W> {
+    pub fn new(writer: W) -> Self {
+        XmlEmitter {
+            cur_indent: 0,
+            writer,
+            context: Default::default(),
+        }
+    }
+
+    /// Write an open tag with the given name (which is not escaped!)
+    /// and attributes (Keys are not escaped! Only attribute values are.)
+    pub fn write_open_tag(
+        &mut self,
+        name_unescaped: &str,
+        attrs: &[(&str, &str)],
+    ) -> std::io::Result<()> {
+        self.add_indent()?;
+        self.writer.write_all(b"<")?;
+        self.writer.write_all(name_unescaped.as_bytes())?;
+        self.write_attrs_escape_vals(attrs)?;
+        self.writer.write_all(b">\n")?;
+        self.cur_indent += 2;
+        Ok(())
+    }
+
+    /// Write a self-closing open tag with the given name (which is not escaped!)
+    /// and attributes (Keys are not escaped! Only attribute values are.)
+    pub fn write_self_closing_tag(
+        &mut self,
+        name_unescaped: &str,
+        attrs: &[(&str, &str)],
+    ) -> std::io::Result<()> {
+        self.add_indent()?;
+        self.writer.write_all(b"<")?;
+        self.writer.write_all(name_unescaped.as_bytes())?;
+        self.write_attrs_escape_vals(attrs)?;
+        self.writer.write_all(b" />\n")?;
+        Ok(())
+    }
+
+    /// Write a closing tag with the given name (which is not escaped!)
+    pub fn write_closing_tag(&mut self, name_unescaped: &str) -> std::io::Result<()> {
+        self.cur_indent -= 2;
+        self.add_indent()?;
+        self.writer.write_all(b"</")?;
+        self.writer.write_all(name_unescaped.as_bytes())?;
+        self.writer.write_all(b">\n")?;
+        Ok(())
+    }
+
+    #[inline]
+    fn add_indent(&mut self) -> std::io::Result<()> {
+        self.writer.write_all(&b" ".repeat(self.cur_indent))
+    }
+
+    /// Write an attribute list
+    fn write_attrs_escape_vals(&mut self, attrs: &[(&str, &str)]) -> std::io::Result<()> {
+        for (name, val) in attrs {
+            self.writer.write_all(b" ")?;
+            self.writer.write_all(name.as_bytes())?;
+            self.writer.write_all(br#"=""#)?;
+            self.writer
+                .write_all(Self::escape_attr_value(val).as_bytes())?;
+            self.writer.write_all(b"\"")?;
+        }
+        Ok(())
+    }
+
+    /// Escape the given attribute value, making sure we only actually clone the string if we needed to replace something.
+    fn escape_attr_value(s: &str) -> Cow<str> {
+        let mut last_escape: usize = 0;
+        let mut res: Cow<str> = Cow::Borrowed("");
+        // iterating via char_indices gives us the ability to index the original string slice at character boundaries
+        for (idx, c) in s.char_indices() {
+            match Self::should_escape_char(c) {
+                None => {}
+                Some(new) => {
+                    // add characters since the last escape we did
+                    res += &s[last_escape..idx];
+                    // add the escaped value
+                    res += new;
+                    last_escape = idx + 1;
+                }
+            }
+        }
+        // we did not need to escape anything, so borrow original string
+        if last_escape == 0 {
+            Cow::Borrowed(s)
+        } else {
+            // add the remaining characters
+            res += &s[last_escape..];
+            res
+        }
+    }
+
+    fn should_escape_char(c: char) -> Option<&'static str> {
+        match c {
+            '<' => Some("&lt;"),
+            '>' => Some("&gt;"),
+            '"' => Some("&quot;"),
+            '\'' => Some("&apos;"),
+            '&' => Some("&amp;"),
+            '\n' => Some("&#xA;"),
+            '\r' => Some("&#xD;"),
+            _ => None,
+        }
+    }
+
+    /// Extends the existing context with more context elements.
+    fn extend_context<T>(&mut self, iter: T)
+    where
+        T: IntoIterator<Item = NixContextElement>,
+    {
+        self.context.extend(iter)
+    }
+
+    /// Consumes [Self] and returns the [NixContext] collected.
+    fn into_context(self) -> NixContext {
+        self.context
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use bytes::buf::Writer;
+    use pretty_assertions::assert_eq;
+
+    use crate::builtins::to_xml::XmlEmitter;
+    use std::borrow::Cow;
+
+    #[test]
+    fn xml_gen() {
+        let mut buf = Vec::new();
+        let mut x = XmlEmitter::new(&mut buf);
+        x.write_open_tag("hello", &[("hi", "it’s me"), ("no", "<escape>")])
+            .unwrap();
+        x.write_self_closing_tag("self-closing", &[("tag", "yay")])
+            .unwrap();
+        x.write_closing_tag("hello").unwrap();
+
+        assert_eq!(
+            std::str::from_utf8(&buf).unwrap(),
+            r##"<hello hi="it’s me" no="&lt;escape&gt;">
+  <self-closing tag="yay" />
+</hello>
+"##
+        );
+    }
+
+    #[test]
+    fn xml_escape() {
+        match XmlEmitter::<Writer<Vec<u8>>>::escape_attr_value("ab<>c&de") {
+            Cow::Owned(s) => assert_eq!(s, "ab&lt;&gt;c&amp;de".to_string(), "escape stuff"),
+            Cow::Borrowed(s) => panic!("s should be owned {}", s),
+        }
+        match XmlEmitter::<Writer<Vec<u8>>>::escape_attr_value("") {
+            Cow::Borrowed(s) => assert_eq!(s, "", "empty escape is borrowed"),
+            Cow::Owned(s) => panic!("s should be borrowed {}", s),
+        }
+        match XmlEmitter::<Writer<Vec<u8>>>::escape_attr_value("hi!ŷbla") {
+            Cow::Borrowed(s) => assert_eq!(s, "hi!ŷbla", "no escape is borrowed"),
+            Cow::Owned(s) => panic!("s should be borrowed {}", s),
+        }
+        match XmlEmitter::<Writer<Vec<u8>>>::escape_attr_value("hi!<ŷ>bla") {
+            Cow::Owned(s) => assert_eq!(
+                s,
+                "hi!&lt;ŷ&gt;bla".to_string(),
+                "multi-byte chars are correctly used"
+            ),
+            Cow::Borrowed(s) => panic!("s should be owned {}", s),
+        }
+    }
+}
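
The `escape_attr_value` helper above is the classic copy-on-write escaping idiom: scan once, and only allocate when a replacement actually happens. A minimal standalone sketch of the same idiom (the `escape` function and its character set are illustrative, not part of the patch):

```rust
use std::borrow::Cow;

// Walk the input once; allocate an owned String on the first character
// that needs escaping, otherwise hand back the borrowed input unchanged.
fn escape(s: &str) -> Cow<str> {
    let mut out: Option<String> = None;
    let mut last = 0;
    for (idx, c) in s.char_indices() {
        let rep = match c {
            '&' => "&amp;",
            '<' => "&lt;",
            '>' => "&gt;",
            _ => continue,
        };
        let buf = out.get_or_insert_with(String::new);
        buf.push_str(&s[last..idx]); // copy the unescaped run since the last hit
        buf.push_str(rep);
        last = idx + c.len_utf8();
    }
    match out {
        None => Cow::Borrowed(s), // nothing escaped, no allocation
        Some(mut buf) => {
            buf.push_str(&s[last..]);
            Cow::Owned(buf)
        }
    }
}
```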
diff --git a/tvix/eval/src/errors.rs b/tvix/eval/src/errors.rs
index 652252dadf..a5f79c235f 100644
--- a/tvix/eval/src/errors.rs
+++ b/tvix/eval/src/errors.rs
@@ -10,7 +10,6 @@ use std::{fmt::Debug, fmt::Display, num::ParseIntError};
 use codemap::{File, Span};
 use codemap_diagnostic::{ColorConfig, Diagnostic, Emitter, Level, SpanLabel, SpanStyle};
 use smol_str::SmolStr;
-use xml::writer::Error as XmlError;
 
 use crate::spans::ToSpan;
 use crate::value::{CoercionKind, NixString};
@@ -194,9 +193,6 @@ pub enum ErrorKind {
     /// Invalid UTF-8 was encountered somewhere
     Utf8,
 
-    /// Errors while serialising to XML.
-    Xml(Rc<XmlError>),
-
     /// Variant for errors that bubble up to eval from other Tvix
     /// components.
     TvixError(Rc<dyn error::Error>),
@@ -248,7 +244,6 @@ impl error::Error for Error {
                 errors.first().map(|e| e as &dyn error::Error)
             }
             ErrorKind::IO { error, .. } => Some(error.as_ref()),
-            ErrorKind::Xml(error) => Some(error.as_ref()),
             ErrorKind::TvixError(error) => Some(error.as_ref()),
             _ => None,
         }
@@ -285,12 +280,6 @@ impl From<bstr::FromUtf8Error> for ErrorKind {
     }
 }
 
-impl From<XmlError> for ErrorKind {
-    fn from(err: XmlError) -> Self {
-        Self::Xml(Rc::new(err))
-    }
-}
-
 impl From<io::Error> for ErrorKind {
     fn from(e: io::Error) -> Self {
         ErrorKind::IO {
@@ -506,8 +495,6 @@ to a missing value in the attribute set(s) included via `with`."#,
                 write!(f, "Invalid UTF-8 in string")
             }
 
-            ErrorKind::Xml(error) => write!(f, "failed to serialise to XML: {error}"),
-
             ErrorKind::TvixError(inner_error) => {
                 write!(f, "{inner_error}")
             }
@@ -823,7 +810,6 @@ impl Error {
             | ErrorKind::JsonError(_)
             | ErrorKind::NotSerialisableToJson(_)
             | ErrorKind::FromTomlError(_)
-            | ErrorKind::Xml(_)
             | ErrorKind::Utf8
             | ErrorKind::TvixError(_)
             | ErrorKind::TvixBug { .. }
@@ -870,7 +856,6 @@ impl Error {
             ErrorKind::UnexpectedArgument { .. } => "E031",
             ErrorKind::RelativePathResolution(_) => "E032",
             ErrorKind::DivisionByZero => "E033",
-            ErrorKind::Xml(_) => "E034",
             ErrorKind::FromTomlError(_) => "E035",
             ErrorKind::NotSerialisableToJson(_) => "E036",
             ErrorKind::UnexpectedContext => "E037",
diff --git a/tvix/eval/src/io.rs b/tvix/eval/src/io.rs
index f775077af8..58586030aa 100644
--- a/tvix/eval/src/io.rs
+++ b/tvix/eval/src/io.rs
@@ -16,14 +16,16 @@
 //! how store paths are opened and so on.
 
 use std::{
-    fs::File,
     io,
     path::{Path, PathBuf},
 };
 
-#[cfg(target_family = "unix")]
+#[cfg(all(target_family = "unix", feature = "impure"))]
 use std::os::unix::ffi::OsStringExt;
 
+#[cfg(feature = "impure")]
+use std::fs::File;
+
 /// Types of files as represented by `builtins.readDir` in Nix.
 #[derive(Debug)]
 pub enum FileType {
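
This io.rs hunk shows the pattern the rest of the commit applies repeatedly: anything touching the real filesystem moves behind the `impure` feature, so pure builds compile without those imports at all. A hedged sketch of the shape (the feature name comes from the patch; the `read_len` function is illustrative):

```rust
#[cfg(feature = "impure")]
use std::fs::File;

// Only compiled for impure builds, where real file I/O is allowed.
#[cfg(feature = "impure")]
fn read_len(path: &std::path::Path) -> std::io::Result<u64> {
    Ok(File::open(path)?.metadata()?.len())
}

// Pure builds get a stub that refuses filesystem access.
#[cfg(not(feature = "impure"))]
fn read_len(_path: &std::path::Path) -> std::io::Result<u64> {
    Err(std::io::Error::new(
        std::io::ErrorKind::Unsupported,
        "file I/O requires the \"impure\" feature",
    ))
}
```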
diff --git a/tvix/eval/src/lib.rs b/tvix/eval/src/lib.rs
index 845964cb7e..398da4d6e2 100644
--- a/tvix/eval/src/lib.rs
+++ b/tvix/eval/src/lib.rs
@@ -29,7 +29,7 @@ mod vm;
 mod warnings;
 
 mod nix_search_path;
-#[cfg(test)]
+#[cfg(all(test, feature = "arbitrary"))]
 mod properties;
 #[cfg(test)]
 mod test_utils;
diff --git a/tvix/eval/src/nix_search_path.rs b/tvix/eval/src/nix_search_path.rs
index 566ca12238..369c5b6857 100644
--- a/tvix/eval/src/nix_search_path.rs
+++ b/tvix/eval/src/nix_search_path.rs
@@ -197,6 +197,8 @@ mod tests {
         }
     }
 
+    // This uses StdIO, which is only available with the impure feature.
+    #[cfg(feature = "impure")]
     mod resolve {
         use crate::StdIO;
         use path_clean::PathClean;
diff --git a/tvix/eval/src/tests/mod.rs b/tvix/eval/src/tests/mod.rs
index 5a7708e298..21b5d35e6a 100644
--- a/tvix/eval/src/tests/mod.rs
+++ b/tvix/eval/src/tests/mod.rs
@@ -1,203 +1,6 @@
-use crate::{value::Value, EvalIO};
-use builtin_macros::builtins;
-use pretty_assertions::assert_eq;
-use rstest::rstest;
-use std::path::PathBuf;
-
 /// Module for one-off tests which do not follow the rest of the
 /// test layout.
 mod one_offs;
 
-#[builtins]
-mod mock_builtins {
-    //! Builtins which are required by language tests, but should not
-    //! actually exist in //tvix/eval.
-    use crate as tvix_eval;
-    use crate::generators::GenCo;
-    use crate::*;
-    use genawaiter::rc::Gen;
-
-    #[builtin("derivation")]
-    async fn builtin_derivation(co: GenCo, input: Value) -> Result<Value, ErrorKind> {
-        let input = input.to_attrs()?;
-        let attrs = input.update(NixAttrs::from_iter(
-            [
-                (
-                    "outPath",
-                    "/nix/store/00000000000000000000000000000000-mock",
-                ),
-                (
-                    "drvPath",
-                    "/nix/store/00000000000000000000000000000000-mock.drv",
-                ),
-                ("type", "derivation"),
-            ]
-            .into_iter(),
-        ));
-
-        Ok(Value::Attrs(Box::new(attrs)))
-    }
-}
-
-fn eval_test(code_path: PathBuf, expect_success: bool) {
-    std::env::set_var("TEST_VAR", "foo"); // for eval-okay-getenv.nix
-
-    eprintln!("path: {}", code_path.display());
-    assert_eq!(
-        code_path.extension().unwrap(),
-        "nix",
-        "test files always end in .nix"
-    );
-
-    let code = std::fs::read_to_string(&code_path).expect("should be able to read test code");
-
-    let mut eval = crate::Evaluation::new_impure();
-    eval.strict = true;
-    eval.builtins.extend(mock_builtins::builtins());
-
-    let result = eval.evaluate(code, Some(code_path.clone()));
-    let failed = match result.value {
-        Some(Value::Catchable(_)) => true,
-        _ => !result.errors.is_empty(),
-    };
-    if expect_success && failed {
-        panic!(
-            "{}: evaluation of eval-okay test should succeed, but failed with {:?}",
-            code_path.display(),
-            result.errors,
-        );
-    }
-
-    if !expect_success && failed {
-        return;
-    }
-    // !expect_success can also mean the output differs, so don't panic if the
-    // evaluation didn't fail.
-
-    let value = result.value.unwrap();
-    let result_str = value.to_string();
-
-    let exp_path = code_path.with_extension("exp");
-    if exp_path.exists() {
-        // If there's an .exp file provided alongside, compare it with the
-        // output of the NixValue .to_string() method.
-        let exp_str = std::fs::read_to_string(&exp_path).expect("unable to read .exp file");
-
-        if expect_success {
-            assert_eq!(
-                result_str,
-                exp_str.trim(),
-                "{}: result value representation (left) must match expectation (right)",
-                code_path.display()
-            );
-        } else {
-            assert_ne!(
-                result_str,
-                exp_str.trim(),
-                "{}: test passed unexpectedly!  consider moving it out of notyetpassing",
-                code_path.display()
-            );
-
-            // Early return here, we don't compare .xml outputs if this is a !
-            // expect_success test.
-            return;
-        }
-    }
-
-    let exp_xml_path = code_path.with_extension("exp.xml");
-    if exp_xml_path.exists() {
-        // If there's an XML file provided alongside, compare it with the
-        // output produced when serializing the Value as XML.
-        let exp_xml_str = std::fs::read_to_string(exp_xml_path).expect("unable to read .xml file");
-
-        let mut xml_actual_buf = Vec::new();
-        crate::builtins::value_to_xml(&mut xml_actual_buf, &value).expect("value_to_xml failed");
-
-        assert_eq!(
-            String::from_utf8(xml_actual_buf).expect("to_xml produced invalid utf-8"),
-            exp_xml_str,
-            "{}: result value representation (left) must match expectation (right)",
-            code_path.display()
-        );
-    }
-}
-
-// identity-* tests contain Nix code snippets which should evaluate to
-// themselves exactly (i.e. literals).
-#[rstest]
-fn identity(#[files("src/tests/tvix_tests/identity-*.nix")] code_path: PathBuf) {
-    let code = std::fs::read_to_string(code_path).expect("should be able to read test code");
-
-    let mut eval = crate::Evaluation::new(Box::new(crate::StdIO) as Box<dyn EvalIO>, false);
-    eval.strict = true;
-
-    let result = eval.evaluate(&code, None);
-    assert!(
-        result.errors.is_empty(),
-        "evaluation of identity test failed: {:?}",
-        result.errors
-    );
-
-    let result_str = result.value.unwrap().to_string();
-
-    assert_eq!(
-        result_str,
-        code.trim(),
-        "result value representation (left) must match expectation (right)"
-    )
-}
-
-// eval-okay-* tests contain a snippet of Nix code, and an expectation
-// of the produced string output of the evaluator.
-//
-// These evaluations are always supposed to succeed, i.e. all snippets
-// are guaranteed to be valid Nix code.
-#[rstest]
-fn eval_okay(#[files("src/tests/tvix_tests/eval-okay-*.nix")] code_path: PathBuf) {
-    eval_test(code_path, true)
-}
-
-// eval-okay-* tests from the original Nix test suite.
-#[cfg(feature = "nix_tests")]
-#[rstest]
-fn nix_eval_okay(#[files("src/tests/nix_tests/eval-okay-*.nix")] code_path: PathBuf) {
-    eval_test(code_path, true)
-}
-
-// eval-okay-* tests from the original Nix test suite which do not yet pass for tvix
-//
-// Eventually there will be none of these left, and this function
-// will disappear :)
-//
-// Please don't submit failing tests unless they're in
-// notyetpassing; this makes the test suite much more useful for
-// regression testing, since there should always be zero non-ignored
-// failing tests.
-#[rstest]
-fn nix_eval_okay_currently_failing(
-    #[files("src/tests/nix_tests/notyetpassing/eval-okay-*.nix")] code_path: PathBuf,
-) {
-    eval_test(code_path, false)
-}
-
-#[rstest]
-fn eval_okay_currently_failing(
-    #[files("src/tests/tvix_tests/notyetpassing/eval-okay-*.nix")] code_path: PathBuf,
-) {
-    eval_test(code_path, false)
-}
-
-// eval-fail-* tests contain a snippet of Nix code, which is
-// expected to fail evaluation.  The exact type of failure
-// (assertion, parse error, etc) is not currently checked.
-#[rstest]
-fn eval_fail(#[files("src/tests/tvix_tests/eval-fail-*.nix")] code_path: PathBuf) {
-    eval_test(code_path, false)
-}
-
-// eval-fail-* tests from the original Nix test suite.
 #[cfg(feature = "nix_tests")]
-#[rstest]
-fn nix_eval_fail(#[files("src/tests/nix_tests/eval-fail-*.nix")] code_path: PathBuf) {
-    eval_test(code_path, false)
-}
+mod nix_tests;
diff --git a/tvix/eval/src/tests/nix_tests.rs b/tvix/eval/src/tests/nix_tests.rs
new file mode 100644
index 0000000000..17968e4bdb
--- /dev/null
+++ b/tvix/eval/src/tests/nix_tests.rs
@@ -0,0 +1,207 @@
+use crate::value::Value;
+use builtin_macros::builtins;
+use pretty_assertions::assert_eq;
+use rstest::rstest;
+use std::path::PathBuf;
+
+#[builtins]
+mod mock_builtins {
+    //! Builtins which are required by language tests, but should not
+    //! actually exist in //tvix/eval.
+    use crate as tvix_eval;
+    use crate::generators::GenCo;
+    use crate::*;
+    use genawaiter::rc::Gen;
+
+    #[builtin("derivation")]
+    async fn builtin_derivation(co: GenCo, input: Value) -> Result<Value, ErrorKind> {
+        let input = input.to_attrs()?;
+        let attrs = input.update(NixAttrs::from_iter(
+            [
+                (
+                    "outPath",
+                    "/nix/store/00000000000000000000000000000000-mock",
+                ),
+                (
+                    "drvPath",
+                    "/nix/store/00000000000000000000000000000000-mock.drv",
+                ),
+                ("type", "derivation"),
+            ]
+            .into_iter(),
+        ));
+
+        Ok(Value::Attrs(Box::new(attrs)))
+    }
+}
+
+#[cfg(feature = "impure")]
+fn eval_test(code_path: PathBuf, expect_success: bool) {
+    std::env::set_var("TEST_VAR", "foo"); // for eval-okay-getenv.nix
+
+    eprintln!("path: {}", code_path.display());
+    assert_eq!(
+        code_path.extension().unwrap(),
+        "nix",
+        "test files always end in .nix"
+    );
+
+    let code = std::fs::read_to_string(&code_path).expect("should be able to read test code");
+
+    let mut eval = crate::Evaluation::new_impure();
+    eval.strict = true;
+    eval.builtins.extend(mock_builtins::builtins());
+
+    let result = eval.evaluate(code, Some(code_path.clone()));
+    let failed = match result.value {
+        Some(Value::Catchable(_)) => true,
+        _ => !result.errors.is_empty(),
+    };
+    if expect_success && failed {
+        panic!(
+            "{}: evaluation of eval-okay test should succeed, but failed with {:?}",
+            code_path.display(),
+            result.errors,
+        );
+    }
+
+    if !expect_success && failed {
+        return;
+    }
+    // !expect_success can also mean the output differs, so don't panic if the
+    // evaluation didn't fail.
+
+    let value = result.value.unwrap();
+    let result_str = value.to_string();
+
+    let exp_path = code_path.with_extension("exp");
+    if exp_path.exists() {
+        // If there's an .exp file provided alongside, compare it with the
+        // output of the NixValue .to_string() method.
+        let exp_str = std::fs::read_to_string(&exp_path).expect("unable to read .exp file");
+
+        if expect_success {
+            assert_eq!(
+                result_str,
+                exp_str.trim(),
+                "{}: result value representation (left) must match expectation (right)",
+                code_path.display()
+            );
+        } else {
+            assert_ne!(
+                result_str,
+                exp_str.trim(),
+                "{}: test passed unexpectedly!  consider moving it out of notyetpassing",
+                code_path.display()
+            );
+
+            // Early return here; we don't compare .xml outputs if this is a
+            // !expect_success test.
+            return;
+        }
+    }
+
+    let exp_xml_path = code_path.with_extension("exp.xml");
+    if exp_xml_path.exists() {
+        // If there's an XML file provided alongside, compare it with the
+        // output produced when serializing the Value as XML.
+        let exp_xml_str = std::fs::read_to_string(exp_xml_path).expect("unable to read .xml file");
+
+        let mut xml_actual_buf = Vec::new();
+        crate::builtins::value_to_xml(&mut xml_actual_buf, &value).expect("value_to_xml failed");
+
+        assert_eq!(
+            String::from_utf8(xml_actual_buf).expect("to_xml produced invalid utf-8"),
+            exp_xml_str,
+            "{}: result value representation (left) must match expectation (right)",
+            code_path.display()
+        );
+    }
+}
+
+// identity-* tests contain Nix code snippets which should evaluate to
+// themselves exactly (i.e. literals).
+#[cfg(feature = "impure")]
+#[rstest]
+fn identity(#[files("src/tests/tvix_tests/identity-*.nix")] code_path: PathBuf) {
+    use crate::EvalIO;
+
+    let code = std::fs::read_to_string(code_path).expect("should be able to read test code");
+
+    let mut eval = crate::Evaluation::new(Box::new(crate::StdIO) as Box<dyn EvalIO>, false);
+    eval.strict = true;
+
+    let result = eval.evaluate(&code, None);
+    assert!(
+        result.errors.is_empty(),
+        "evaluation of identity test failed: {:?}",
+        result.errors
+    );
+
+    let result_str = result.value.unwrap().to_string();
+
+    assert_eq!(
+        result_str,
+        code.trim(),
+        "result value representation (left) must match expectation (right)"
+    )
+}
+
+// eval-okay-* tests contain a snippet of Nix code, and an expectation
+// of the produced string output of the evaluator.
+//
+// These evaluations are always supposed to succeed, i.e. all snippets
+// are guaranteed to be valid Nix code.
+#[cfg(feature = "impure")]
+#[rstest]
+fn eval_okay(#[files("src/tests/tvix_tests/eval-okay-*.nix")] code_path: PathBuf) {
+    eval_test(code_path, true)
+}
+
+// eval-okay-* tests from the original Nix test suite.
+#[cfg(feature = "impure")]
+#[rstest]
+fn nix_eval_okay(#[files("src/tests/nix_tests/eval-okay-*.nix")] code_path: PathBuf) {
+    eval_test(code_path, true)
+}
+
+// eval-okay-* tests from the original Nix test suite which do not yet pass for tvix
+//
+// Eventually there will be none of these left, and this function
+// will disappear :)
+//
+// Please don't submit failing tests unless they're in
+// notyetpassing; this makes the test suite much more useful for
+// regression testing, since there should always be zero non-ignored
+// failing tests.
+#[cfg(feature = "impure")]
+#[rstest]
+fn nix_eval_okay_currently_failing(
+    #[files("src/tests/nix_tests/notyetpassing/eval-okay-*.nix")] code_path: PathBuf,
+) {
+    eval_test(code_path, false)
+}
+
+#[cfg(feature = "impure")]
+#[rstest]
+fn eval_okay_currently_failing(
+    #[files("src/tests/tvix_tests/notyetpassing/eval-okay-*.nix")] code_path: PathBuf,
+) {
+    eval_test(code_path, false)
+}
+
+// eval-fail-* tests contain a snippet of Nix code, which is
+// expected to fail evaluation.  The exact type of failure
+// (assertion, parse error, etc) is not currently checked.
+#[cfg(feature = "impure")]
+#[rstest]
+fn eval_fail(#[files("src/tests/tvix_tests/eval-fail-*.nix")] code_path: PathBuf) {
+    eval_test(code_path, false)
+}
+
+// eval-fail-* tests from the original Nix test suite.
+#[cfg(feature = "impure")]
+#[rstest]
+fn nix_eval_fail(#[files("src/tests/nix_tests/eval-fail-*.nix")] code_path: PathBuf) {
+    eval_test(code_path, false)
+}
diff --git a/tvix/eval/src/tests/one_offs.rs b/tvix/eval/src/tests/one_offs.rs
index 565d1dd48f..21e9144baf 100644
--- a/tvix/eval/src/tests/one_offs.rs
+++ b/tvix/eval/src/tests/one_offs.rs
@@ -5,7 +5,7 @@ fn test_source_builtin() {
     // Test an evaluation with a source-only builtin. The test ensures
     // that the artificially constructed thunking is correct.
 
-    let mut eval = Evaluation::new_impure();
+    let mut eval = Evaluation::new_pure();
     eval.src_builtins.push(("testSourceBuiltin", "42"));
 
     let result = eval.evaluate("builtins.testSourceBuiltin", None);
diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-split.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-split.exp
new file mode 100644
index 0000000000..eb2117a0ce
--- /dev/null
+++ b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-split.exp
@@ -0,0 +1 @@
+[ [ "" [ "a" ] "c" ] [ "" [ "a" ] "b" [ "c" ] "" ] [ "" [ "a" null ] "b" [ null "c" ] "" ] [ " " [ "FOO" ] " " ] [ "" [ "abc" ] "" [ "" ] "" ] [ "" [ "abc" ] "" [ "" ] "" ] [ "" [ ] "" ] ]
diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-split.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-split.nix
new file mode 100644
index 0000000000..95305040dc
--- /dev/null
+++ b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-split.nix
@@ -0,0 +1,10 @@
+[
+  (builtins.split "(a)b" "abc")
+  (builtins.split "([ac])" "abc")
+  (builtins.split "(a)|(c)" "abc")
+  (builtins.split "([[:upper:]]+)" " FOO ")
+
+  (builtins.split "(.*)" "abc")
+  (builtins.split "([abc]*)" "abc")
+  (builtins.split ".*" "")
+]
diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-toxml-empty.exp.xml b/tvix/eval/src/tests/tvix_tests/eval-okay-toxml-empty.exp.xml
new file mode 100644
index 0000000000..468972b2f8
--- /dev/null
+++ b/tvix/eval/src/tests/tvix_tests/eval-okay-toxml-empty.exp.xml
@@ -0,0 +1,5 @@
+<?xml version='1.0' encoding='utf-8'?>
+<expr>
+  <attrs>
+  </attrs>
+</expr>
diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-toxml-empty.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-toxml-empty.nix
new file mode 100644
index 0000000000..ffcd4415b0
--- /dev/null
+++ b/tvix/eval/src/tests/tvix_tests/eval-okay-toxml-empty.nix
@@ -0,0 +1 @@
+{ }
diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-toxml.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-toxml.exp
new file mode 100644
index 0000000000..9ae16de526
--- /dev/null
+++ b/tvix/eval/src/tests/tvix_tests/eval-okay-toxml.exp
@@ -0,0 +1 @@
+"<?xml version='1.0' encoding='utf-8'?>\n<expr>\n  <attrs>\n    <attr name=\"&amp;-{\">\n      <string value=\";&amp;&quot;\" />\n    </attr>\n    <attr name=\"a\">\n      <string value=\"s\" />\n    </attr>\n  </attrs>\n</expr>\n"
diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-toxml.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-toxml.nix
new file mode 100644
index 0000000000..7d074048dd
--- /dev/null
+++ b/tvix/eval/src/tests/tvix_tests/eval-okay-toxml.nix
@@ -0,0 +1,2 @@
+# Check some corner cases regarding escaping.
+builtins.toXML { a = "s"; "&-{" = ";&\""; }
diff --git a/tvix/eval/src/value/json.rs b/tvix/eval/src/value/json.rs
index c48e9c1f4e..24a6bcaf6f 100644
--- a/tvix/eval/src/value/json.rs
+++ b/tvix/eval/src/value/json.rs
@@ -47,8 +47,8 @@ impl Value {
 
                 for val in l.into_iter() {
                     match generators::request_to_json(co, val).await {
-                        Ok((v, mut ctx)) => {
-                            context = context.join(&mut ctx);
+                        Ok((v, ctx)) => {
+                            context.extend(ctx.into_iter());
                             out.push(v)
                         }
                         Err(cek) => return Ok(Err(cek)),
@@ -100,8 +100,8 @@ impl Value {
                     out.insert(
                         name.to_str()?.to_owned(),
                         match generators::request_to_json(co, value).await {
-                            Ok((v, mut ctx)) => {
-                                context = context.join(&mut ctx);
+                            Ok((v, ctx)) => {
+                                context.extend(ctx.into_iter());
                                 v
                             }
                             Err(cek) => return Ok(Err(cek)),
diff --git a/tvix/eval/src/value/mod.rs b/tvix/eval/src/value/mod.rs
index c171c9a04e..dfad0cd839 100644
--- a/tvix/eval/src/value/mod.rs
+++ b/tvix/eval/src/value/mod.rs
@@ -338,8 +338,8 @@ impl Value {
             let coerced: Result<BString, _> = match (value, kind) {
                 // coercions that are always done
                 (Value::String(mut s), _) => {
-                    if let Some(ctx) = s.context_mut() {
-                        context = context.join(ctx);
+                    if let Some(ctx) = s.take_context() {
+                        context.extend(ctx.into_iter());
                     }
                     Ok((*s).into())
                 }
diff --git a/tvix/eval/src/value/string.rs b/tvix/eval/src/value/string.rs
index ceb43f1ea5..4f869eb00d 100644
--- a/tvix/eval/src/value/string.rs
+++ b/tvix/eval/src/value/string.rs
@@ -54,6 +54,12 @@ impl From<HashSet<NixContextElement>> for NixContext {
     }
 }
 
+impl<const N: usize> From<[NixContextElement; N]> for NixContext {
+    fn from(value: [NixContextElement; N]) -> Self {
+        Self(HashSet::from(value))
+    }
+}
+
 impl NixContext {
     /// Creates an empty context that can be populated
     /// and passed to form a contextful [NixString], albeit
@@ -78,20 +84,19 @@ impl NixContext {
         self
     }
 
-    /// Consumes both ends of the join into a new NixContent
-    /// containing the union of elements of both ends.
-    pub fn join(mut self, other: &mut NixContext) -> Self {
-        let other_set = std::mem::take(&mut other.0);
-        let mut set: HashSet<NixContextElement> = std::mem::take(&mut self.0);
-        set.extend(other_set);
-        Self(set)
+    /// Extends the existing context with more context elements.
+    pub fn extend<T>(&mut self, iter: T)
+    where
+        T: IntoIterator<Item = NixContextElement>,
+    {
+        self.0.extend(iter)
     }
 
     /// Copies the context strings from another [NixString]
     /// into this context.
     pub fn mimic(&mut self, other: &NixString) {
         if let Some(context) = other.context() {
-            self.0.extend(context.iter().cloned());
+            self.extend(context.iter().cloned());
         }
     }
 
@@ -154,6 +159,16 @@ impl NixContext {
     }
 }
 
+impl IntoIterator for NixContext {
+    type Item = NixContextElement;
+
+    type IntoIter = std::collections::hash_set::IntoIter<NixContextElement>;
+
+    fn into_iter(self) -> Self::IntoIter {
+        self.0.into_iter()
+    }
+}
+
 /// This type is never instantiated, but serves to document the memory layout of the actual heap
 /// allocation for Nix strings.
 #[allow(dead_code)]
@@ -616,6 +631,11 @@ mod arbitrary {
 
 impl NixString {
     fn new(contents: &[u8], context: Option<Box<NixContext>>) -> Self {
+        debug_assert!(
+            !context.as_deref().is_some_and(NixContext::is_empty),
+            "BUG: initialized with empty context"
+        );
+
         // SAFETY: We're always fully initializing a NixString here:
         //
         // 1. NixStringInner::alloc sets up the len for us
@@ -707,8 +727,10 @@ impl NixString {
         let context = [self.context(), other.context()]
             .into_iter()
             .flatten()
-            .fold(NixContext::new(), |acc_ctx, new_ctx| {
-                acc_ctx.join(&mut new_ctx.clone())
+            .fold(NixContext::new(), |mut acc_ctx, new_ctx| {
+                // TODO: consume new_ctx?
+                acc_ctx.extend(new_ctx.iter().cloned());
+                acc_ctx
             });
         Self::new_context_from(context, s)
     }
@@ -719,16 +741,30 @@ impl NixString {
         //
         // Also, we're using the same lifetime and mutability as self, to fit the
         // pointer-to-reference conversion rules
-        unsafe { NixStringInner::context_ref(self.0).as_deref() }
+        let context = unsafe { NixStringInner::context_ref(self.0).as_deref() };
+
+        debug_assert!(
+            !context.is_some_and(NixContext::is_empty),
+            "BUG: empty context"
+        );
+
+        context
     }
 
-    pub(crate) fn context_mut(&mut self) -> Option<&mut NixContext> {
+    pub(crate) fn context_mut(&mut self) -> &mut Option<Box<NixContext>> {
         // SAFETY: There's no way to construct an uninitialized or invalid NixString (see the SAFETY
         // comment in `new`).
         //
         // Also, we're using the same lifetime and mutability as self, to fit the
         // pointer-to-reference conversion rules
-        unsafe { NixStringInner::context_mut(self.0).as_deref_mut() }
+        let context = unsafe { NixStringInner::context_mut(self.0) };
+
+        debug_assert!(
+            !context.as_deref().is_some_and(NixContext::is_empty),
+            "BUG: empty context"
+        );
+
+        context
     }
 
     pub fn iter_context(&self) -> impl Iterator<Item = &NixContext> {
@@ -756,12 +792,16 @@ impl NixString {
         self.context().is_some()
     }
 
+    /// This clears the context of the string, returning
+    /// the removed dependency tracking information.
+    pub fn take_context(&mut self) -> Option<Box<NixContext>> {
+        self.context_mut().take()
+    }
+
     /// This clears the context of that string, losing
     /// all dependency tracking information.
     pub fn clear_context(&mut self) {
-        // SAFETY: There's no way to construct an uninitialized or invalid NixString (see the SAFETY
-        // comment in `new`).
-        *unsafe { NixStringInner::context_mut(self.0) } = None;
+        let _ = self.take_context();
     }
 
     pub fn chars(&self) -> Chars<'_> {
@@ -849,7 +889,7 @@ impl Display for NixString {
     }
 }
 
-#[cfg(test)]
+#[cfg(all(test, feature = "arbitrary"))]
 mod tests {
     use test_strategy::proptest;
 
diff --git a/tvix/eval/src/vm/generators.rs b/tvix/eval/src/vm/generators.rs
index 79de688692..dbf7703bf0 100644
--- a/tvix/eval/src/vm/generators.rs
+++ b/tvix/eval/src/vm/generators.rs
@@ -745,6 +745,7 @@ pub async fn request_open_file(co: &GenCo, path: PathBuf) -> Box<dyn std::io::Re
     }
 }
 
+#[cfg_attr(not(feature = "impure"), allow(unused))]
 pub(crate) async fn request_path_exists(co: &GenCo, path: PathBuf) -> Value {
     match co.yield_(VMRequest::PathExists(path)).await {
         VMResponse::Value(value) => value,
@@ -755,6 +756,7 @@ pub(crate) async fn request_path_exists(co: &GenCo, path: PathBuf) -> Value {
     }
 }
 
+#[cfg_attr(not(feature = "impure"), allow(unused))]
 pub(crate) async fn request_read_dir(co: &GenCo, path: PathBuf) -> Vec<(bytes::Bytes, FileType)> {
     match co.yield_(VMRequest::ReadDir(path)).await {
         VMResponse::Directory(dir) => dir,
diff --git a/tvix/eval/src/vm/mod.rs b/tvix/eval/src/vm/mod.rs
index 5c244cc3ca..1fa9eba4bc 100644
--- a/tvix/eval/src/vm/mod.rs
+++ b/tvix/eval/src/vm/mod.rs
@@ -994,8 +994,8 @@ where
             }
             let mut nix_string = val.to_contextful_str().with_span(frame, self)?;
             out.push_str(nix_string.as_bstr());
-            if let Some(nix_string_ctx) = nix_string.context_mut() {
-                context = context.join(nix_string_ctx);
+            if let Some(nix_string_ctx) = nix_string.take_context() {
+                context.extend(nix_string_ctx.into_iter())
             }
         }
 
diff --git a/tvix/eval/tests/nix_oracle.rs b/tvix/eval/tests/nix_oracle.rs
index 5a5cc0a822..3d3abc7363 100644
--- a/tvix/eval/tests/nix_oracle.rs
+++ b/tvix/eval/tests/nix_oracle.rs
@@ -56,6 +56,7 @@ fn nix_eval(expr: &str, strictness: Strictness) -> String {
 /// `NIX_INSTANTIATE_BINARY_PATH` env var to resolve the `nix-instantiate` binary) and tvix, and
 /// assert that the result is identical
 #[track_caller]
+#[cfg(feature = "impure")]
 fn compare_eval(expr: &str, strictness: Strictness) {
     let nix_result = nix_eval(expr, strictness);
     let mut eval = tvix_eval::Evaluation::new_pure();
@@ -76,6 +77,7 @@ fn compare_eval(expr: &str, strictness: Strictness) {
 macro_rules! compare_eval_tests {
     ($strictness:expr, {}) => {};
     ($strictness:expr, {$(#[$meta:meta])* $test_name: ident($expr: expr); $($rest:tt)*}) => {
+        #[cfg(feature = "impure")]
         #[test]
         $(#[$meta])*
         fn $test_name() {
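
Because these oracle tests are generated by `compare_eval_tests!`, the feature gate has to be emitted inside the macro expansion so that every generated `#[test]` carries it. A toy version of that pattern (macro and test names are illustrative):

```rust
macro_rules! gated_tests {
    ($($name:ident => $val:expr;)*) => {
        $(
            // The cfg attribute is stamped onto each generated test.
            #[cfg(feature = "impure")]
            #[test]
            fn $name() {
                assert_eq!($val, $val);
            }
        )*
    };
}

gated_tests! {
    trivial_int => 1;
    trivial_str => "x";
}
```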
diff --git a/tvix/glue/Cargo.toml b/tvix/glue/Cargo.toml
index 0afdefeaaa..6968210c5e 100644
--- a/tvix/glue/Cargo.toml
+++ b/tvix/glue/Cargo.toml
@@ -17,7 +17,9 @@ tvix-build = { path = "../build", default-features = false, features = []}
 tvix-eval = { path = "../eval" }
 tvix-castore = { path = "../castore" }
 tvix-store = { path = "../store", default-features = false, features = []}
+tvix-tracing = { path = "../tracing" }
 tracing = "0.1.37"
+tracing-indicatif = "0.3.6"
 tokio = "1.28.0"
 tokio-tar = "0.3.1"
 tokio-util = { version = "0.7.9", features = ["io", "io-util", "compat"] }
@@ -33,6 +35,9 @@ walkdir = "2.4.0"
 [dependencies.wu-manber]
 git = "https://github.com/tvlfyi/wu-manber.git"
 
+[target.'cfg(not(target_env = "msvc"))'.dependencies]
+tikv-jemallocator = "0.5"
+
 [dev-dependencies]
 criterion = { version = "0.5", features = ["html_reports"] }
 hex-literal = "0.4.1"
diff --git a/tvix/glue/benches/eval.rs b/tvix/glue/benches/eval.rs
index 202278c1aa..9e0154cad7 100644
--- a/tvix/glue/benches/eval.rs
+++ b/tvix/glue/benches/eval.rs
@@ -1,6 +1,8 @@
 use criterion::{black_box, criterion_group, criterion_main, Criterion};
 use lazy_static::lazy_static;
 use std::{env, rc::Rc, sync::Arc, time::Duration};
+#[cfg(not(target_env = "msvc"))]
+use tikv_jemallocator::Jemalloc;
 use tvix_build::buildservice::DummyBuildService;
 use tvix_eval::{builtins::impure_builtins, EvalIO};
 use tvix_glue::{
@@ -11,6 +13,10 @@ use tvix_glue::{
 };
 use tvix_store::utils::construct_services;
 
+#[cfg(not(target_env = "msvc"))]
+#[global_allocator]
+static GLOBAL: Jemalloc = Jemalloc;
+
 lazy_static! {
     static ref TOKIO_RUNTIME: tokio::runtime::Runtime = tokio::runtime::Runtime::new().unwrap();
 }
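
The benchmark opts into jemalloc through Rust's pluggable global allocator; the two attributes are all it takes in any binary or bench crate. A minimal sketch, assuming `tikv-jemallocator` is declared as in the Cargo.toml hunk above:

```rust
#[cfg(not(target_env = "msvc"))]
use tikv_jemallocator::Jemalloc;

// Route every heap allocation in this binary through jemalloc, except on
// MSVC targets where the crate does not build.
#[cfg(not(target_env = "msvc"))]
#[global_allocator]
static GLOBAL: Jemalloc = Jemalloc;

fn main() {
    let buf = vec![0u8; 1 << 20]; // served by jemalloc on non-MSVC targets
    println!("allocated {} bytes", buf.len());
}
```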
diff --git a/tvix/glue/build.rs b/tvix/glue/build.rs
new file mode 100644
index 0000000000..544c34a6cb
--- /dev/null
+++ b/tvix/glue/build.rs
@@ -0,0 +1,6 @@
+fn main() {
+    // Pick up new test case files
+    // https://github.com/la10736/rstest/issues/256
+    println!("cargo:rerun-if-changed=src/tests/nix_tests");
+    println!("cargo:rerun-if-changed=src/tests/tvix_tests")
+}
diff --git a/tvix/glue/default.nix b/tvix/glue/default.nix
index 08f5c2228d..14c7e214f2 100644
--- a/tvix/glue/default.nix
+++ b/tvix/glue/default.nix
@@ -1,8 +1,17 @@
-{ depot, pkgs, ... }:
+{ depot, pkgs, lib, ... }:
 
 (depot.tvix.crates.workspaceMembers.tvix-glue.build.override {
   runTests = true;
   testPreRun = ''
     export SSL_CERT_FILE=${pkgs.cacert}/etc/ssl/certs/ca-bundle.crt;
   '';
+}).overrideAttrs (old: rec {
+  meta.ci.targets = lib.filter (x: lib.hasPrefix "with-features" x || x == "no-features") (lib.attrNames passthru);
+  passthru = depot.tvix.utils.mkFeaturePowerset {
+    inherit (old) crateName;
+    features = [ "nix_tests" ];
+    override.testPreRun = ''
+      export SSL_CERT_FILE=${pkgs.cacert}/etc/ssl/certs/ca-bundle.crt;
+    '';
+  };
 })
diff --git a/tvix/glue/src/builtins/derivation.rs b/tvix/glue/src/builtins/derivation.rs
index a7742ae40a..f266141cb6 100644
--- a/tvix/glue/src/builtins/derivation.rs
+++ b/tvix/glue/src/builtins/derivation.rs
@@ -170,6 +170,7 @@ pub(crate) mod derivation_builtins {
     use std::collections::BTreeMap;
 
     use crate::builtins::utils::{select_string, strong_importing_coerce_to_string};
+    use crate::fetchurl::fetchurl_derivation_to_fetch;
 
     use super::*;
     use bstr::ByteSlice;
@@ -372,12 +373,12 @@ pub(crate) mod derivation_builtins {
                             return Ok(val);
                         }
 
-                        let (val_json, mut context) = match val.into_contextful_json(&co).await? {
+                        let (val_json, context) = match val.into_contextful_json(&co).await? {
                             Ok(v) => v,
                             Err(cek) => return Ok(Value::from(cek)),
                         };
 
-                        input_context = input_context.join(&mut context);
+                        input_context.extend(context.into_iter());
 
                         // No need to check for dups, we only iterate over every attribute name once
                         structured_attrs.insert(arg_name.to_owned(), val_json);
@@ -506,6 +507,17 @@ pub(crate) mod derivation_builtins {
                 ))),
         )));
 
+        // If the derivation is a fake derivation (builtin:fetchurl),
+        // synthesize a [Fetch] and add it there, too.
+        if drv.builder == "builtin:fetchurl" {
+            let (name, fetch) =
+                fetchurl_derivation_to_fetch(&drv).map_err(|e| ErrorKind::TvixError(Rc::new(e)))?;
+
+            known_paths
+                .add_fetch(fetch, &name)
+                .map_err(|e| ErrorKind::TvixError(Rc::new(e)))?;
+        }
+
         // Register the Derivation in known_paths.
         known_paths.add_derivation(drv_path, drv);
 
diff --git a/tvix/glue/src/builtins/errors.rs b/tvix/glue/src/builtins/errors.rs
index f6d5745c56..af8a24e6ab 100644
--- a/tvix/glue/src/builtins/errors.rs
+++ b/tvix/glue/src/builtins/errors.rs
@@ -4,7 +4,7 @@ use nix_compat::{
     store_path::BuildStorePathError,
 };
 use reqwest::Url;
-use std::rc::Rc;
+use std::{path::PathBuf, rc::Rc};
 use thiserror::Error;
 use tvix_castore::import;
 
@@ -65,8 +65,12 @@ pub enum FetcherError {
 pub enum ImportError {
     #[error("non-file '{0}' cannot be imported in 'flat' mode")]
     FlatImportOfNonFile(String),
+
     #[error("hash mismatch at ingestion of '{0}', expected: '{1}', got: '{2}'")]
     HashMismatch(String, NixHash, NixHash),
+
+    #[error("path '{}' is not in the Nix store", .0.display())]
+    PathNotInStore(PathBuf),
 }
 
 impl From<ImportError> for tvix_eval::ErrorKind {
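
The new `PathNotInStore` variant relies on thiserror evaluating method calls on fields inside the `#[error]` attribute, which is how the `PathBuf` is rendered via `.display()`. A minimal sketch of that mechanism in isolation (the `DemoImportError` type is illustrative):

```rust
use std::path::PathBuf;
use thiserror::Error;

#[derive(Debug, Error)]
enum DemoImportError {
    // `.0.display()` runs when the message is formatted.
    #[error("path '{}' is not in the Nix store", .0.display())]
    PathNotInStore(PathBuf),
}

fn main() {
    let e = DemoImportError::PathNotInStore(PathBuf::from("/tmp/nope"));
    assert_eq!(e.to_string(), "path '/tmp/nope' is not in the Nix store");
}
```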
diff --git a/tvix/glue/src/builtins/fetchers.rs b/tvix/glue/src/builtins/fetchers.rs
index c7602c03e8..d95287aeca 100644
--- a/tvix/glue/src/builtins/fetchers.rs
+++ b/tvix/glue/src/builtins/fetchers.rs
@@ -6,9 +6,7 @@ use crate::{
     tvix_store_io::TvixStoreIO,
 };
 use nix_compat::nixhash;
-use nix_compat::nixhash::NixHash;
 use std::rc::Rc;
-use tracing::info;
 use tvix_eval::builtin_macros::builtins;
 use tvix_eval::generators::Gen;
 use tvix_eval::generators::GenCo;
@@ -80,6 +78,7 @@ async fn extract_fetch_args(
 #[builtins(state = "Rc<TvixStoreIO>")]
 pub(crate) mod fetcher_builtins {
     use crate::builtins::FetcherError;
+    use nix_compat::nixhash::NixHash;
     use url::Url;
 
     use super::*;
@@ -112,8 +111,6 @@ pub(crate) mod fetcher_builtins {
             }
             None => {
                 // If we don't have enough info, do the fetch now.
-                info!(?fetch, "triggering required fetch");
-
                 let (store_path, _root_node) = state
                     .tokio_handle
                     .block_on(async { state.fetcher.ingest_and_persist(&name, fetch).await })
@@ -147,7 +144,10 @@ pub(crate) mod fetcher_builtins {
         fetch_lazy(
             state,
             name,
-            Fetch::URL(url, args.sha256.map(NixHash::Sha256)),
+            Fetch::URL {
+                url,
+                exp_hash: args.sha256.map(NixHash::Sha256),
+            },
         )
     }
 
@@ -172,7 +172,14 @@ pub(crate) mod fetcher_builtins {
         let url = Url::parse(&args.url_str)
             .map_err(|e| ErrorKind::TvixError(Rc::new(FetcherError::InvalidUrl(e))))?;
 
-        fetch_lazy(state, name, Fetch::Tarball(url, args.sha256))
+        fetch_lazy(
+            state,
+            name,
+            Fetch::Tarball {
+                url,
+                exp_nar_sha256: args.sha256,
+            },
+        )
     }
 
     #[builtin("fetchGit")]
diff --git a/tvix/glue/src/builtins/import.rs b/tvix/glue/src/builtins/import.rs
index 4a15afa814..4a8a29b417 100644
--- a/tvix/glue/src/builtins/import.rs
+++ b/tvix/glue/src/builtins/import.rs
@@ -104,11 +104,13 @@ async fn filtered_ingest(
 
 #[builtins(state = "Rc<TvixStoreIO>")]
 mod import_builtins {
+    use std::os::unix::ffi::OsStrExt;
     use std::rc::Rc;
 
     use super::*;
 
     use nix_compat::nixhash::{CAHash, NixHash};
+    use nix_compat::store_path::StorePath;
     use tvix_eval::generators::Gen;
     use tvix_eval::{generators::GenCo, ErrorKind, Value};
     use tvix_eval::{NixContextElement, NixString};
@@ -280,6 +282,44 @@ mod import_builtins {
                 .into(),
         )
     }
+
+    #[builtin("storePath")]
+    async fn builtin_store_path(
+        state: Rc<TvixStoreIO>,
+        co: GenCo,
+        path: Value,
+    ) -> Result<Value, ErrorKind> {
+        let p = std::str::from_utf8(match &path {
+            Value::String(s) => s.as_bytes(),
+            Value::Path(p) => p.as_os_str().as_bytes(),
+            _ => {
+                return Err(ErrorKind::TypeError {
+                    expected: "string or path",
+                    actual: path.type_of(),
+                })
+            }
+        })?;
+
+        let path_exists = if let Ok((store_path, sub_path)) = StorePath::from_absolute_path_full(p)
+        {
+            if !sub_path.as_os_str().is_empty() {
+                false
+            } else {
+                state.store_path_exists(store_path.as_ref()).await?
+            }
+        } else {
+            false
+        };
+
+        if !path_exists {
+            return Err(ImportError::PathNotInStore(p.into()).into());
+        }
+
+        Ok(Value::String(NixString::new_context_from(
+            [NixContextElement::Plain(p.into())].into(),
+            p,
+        )))
+    }
 }
 
 pub use import_builtins::builtins as import_builtins;
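
`builtins.storePath` accepts only a path that is exactly a store path root: it must parse as one, carry no trailing sub-path, and exist in the store. A rough sketch of the parse-and-check shape, with a hypothetical parser standing in for `StorePath::from_absolute_path_full` (real store path validation is stricter than this, and the existence check against the store is omitted):

```rust
// Hypothetical stand-in: split "/nix/store/<name>" from any sub-path.
fn parse_store_path(p: &str) -> Result<(&str, &str), ()> {
    let rest = p.strip_prefix("/nix/store/").ok_or(())?;
    Ok(match rest.split_once('/') {
        Some((name, sub)) => (name, sub),
        None => (rest, ""),
    })
}

// Mirrors the builtin's gate: any non-empty sub-path disqualifies the input.
fn is_plain_store_path(p: &str) -> bool {
    matches!(parse_store_path(p), Ok((_, sub)) if sub.is_empty())
}

fn main() {
    assert!(is_plain_store_path("/nix/store/aaaa-hello"));
    assert!(!is_plain_store_path("/nix/store/aaaa-hello/bin/hello"));
    assert!(!is_plain_store_path("/tmp/aaaa-hello"));
}
```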
diff --git a/tvix/glue/src/fetchers/decompression.rs b/tvix/glue/src/fetchers/decompression.rs
index f96fa60e34..69a8297e6a 100644
--- a/tvix/glue/src/fetchers/decompression.rs
+++ b/tvix/glue/src/fetchers/decompression.rs
@@ -1,5 +1,3 @@
-#![allow(dead_code)] // TODO
-
 use std::{
     io, mem,
     pin::Pin,
@@ -155,9 +153,7 @@ where
         };
 
         let mut our_buf = ReadBuf::new(buffer);
-        if let Err(e) = ready!(inner.as_pin_mut().unwrap().poll_read(cx, &mut our_buf)) {
-            return Poll::Ready(Err(e));
-        }
+        ready!(inner.as_pin_mut().unwrap().poll_read(cx, &mut our_buf))?;
 
         let data = our_buf.filled();
         if data.len() >= BYTES_NEEDED {
diff --git a/tvix/glue/src/fetchers/mod.rs b/tvix/glue/src/fetchers/mod.rs
index 1b2e1ee20c..6cce569e97 100644
--- a/tvix/glue/src/fetchers/mod.rs
+++ b/tvix/glue/src/fetchers/mod.rs
@@ -1,14 +1,15 @@
 use futures::TryStreamExt;
-use md5::Md5;
+use md5::{digest::DynDigest, Md5};
 use nix_compat::{
     nixhash::{CAHash, HashAlgo, NixHash},
     store_path::{build_ca_path, BuildStorePathError, StorePathRef},
 };
 use sha1::Sha1;
 use sha2::{digest::Output, Digest, Sha256, Sha512};
-use tokio::io::{AsyncBufRead, AsyncRead, AsyncWrite};
-use tokio_util::io::InspectReader;
-use tracing::warn;
+use tokio::io::{AsyncBufRead, AsyncRead, AsyncWrite, AsyncWriteExt, BufReader};
+use tokio_util::io::{InspectReader, InspectWriter};
+use tracing::{instrument, warn, Span};
+use tracing_indicatif::span_ext::IndicatifSpanExt;
 use tvix_castore::{
     blobservice::BlobService,
     directoryservice::DirectoryService,
@@ -25,10 +26,14 @@ use decompression::DecompressedReader;
 /// Representing options for doing a fetch.
 #[derive(Clone, Eq, PartialEq)]
 pub enum Fetch {
-    /// Fetch a literal file from the given URL, with an optional expected
-    /// NixHash of it.
-    /// TODO: check if this is *always* sha256, and if so, make it [u8; 32].
-    URL(Url, Option<NixHash>),
+    /// Fetch a literal file from the given URL,
+    /// with an optional expected hash.
+    URL {
+        /// The URL to fetch from.
+        url: Url,
+        /// The expected hash of the file.
+        exp_hash: Option<NixHash>,
+    },
 
     /// Fetch a tarball from the given URL and unpack.
     /// The file must be a tape archive (.tar), optionally compressed with gzip,
@@ -37,7 +42,37 @@ pub enum Fetch {
     /// so it is best if the tarball contains a single directory at top level.
     /// Optionally, a sha256 digest can be provided to verify the unpacked
     /// contents against.
-    Tarball(Url, Option<[u8; 32]>),
+    Tarball {
+        /// The URL to fetch from.
+        url: Url,
+        /// The expected hash of the contents, as NAR.
+        exp_nar_sha256: Option<[u8; 32]>,
+    },
+
+    /// Fetch a NAR file from the given URL and unpack.
+    /// The file can optionally be compressed.
+    NAR {
+        /// The URL to fetch from.
+        url: Url,
+        /// The expected hash of the NAR representation.
+        /// This unfortunately supports more than sha256.
+        hash: NixHash,
+    },
+
+    /// Fetches a file at a URL and makes it the store path root node,
+    /// marked executable.
+    /// Used by <nix/fetchurl.nix>, with `executable = true;`.
+    /// The expected hash is over the NAR representation, but is not necessarily SHA256:
+    /// ```nix
+    /// (import <nix/fetchurl.nix> { url = "https://cache.nixos.org/nar/0r8nqa1klm5v17ifc6z96m9wywxkjvgbnqq9pmy0sgqj53wj3n12.nar.xz"; hash = "sha1-NKNeU1csW5YJ4lCeWH3Z/apppNU="; executable = true; })
+    /// ```
+    Executable {
+        /// The URL to fetch from.
+        url: Url,
+        /// The expected hash of the NAR representation.
+        /// This unfortunately supports more than sha256.
+        hash: NixHash,
+    },
 
     /// TODO
     Git(),
@@ -60,7 +95,7 @@ fn redact_url(url: &Url) -> Url {
 impl std::fmt::Debug for Fetch {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         match self {
-            Fetch::URL(url, exp_hash) => {
+            Fetch::URL { url, exp_hash } => {
                 let url = redact_url(url);
                 if let Some(exp_hash) = exp_hash {
                     write!(f, "URL [url: {}, exp_hash: Some({})]", &url, exp_hash)
@@ -68,19 +103,30 @@ impl std::fmt::Debug for Fetch {
                     write!(f, "URL [url: {}, exp_hash: None]", &url)
                 }
             }
-            Fetch::Tarball(url, exp_digest) => {
+            Fetch::Tarball {
+                url,
+                exp_nar_sha256,
+            } => {
                 let url = redact_url(url);
-                if let Some(exp_digest) = exp_digest {
+                if let Some(exp_nar_sha256) = exp_nar_sha256 {
                     write!(
                         f,
-                        "Tarball [url: {}, exp_hash: Some({})]",
+                        "Tarball [url: {}, exp_nar_sha256: Some({})]",
                         url,
-                        NixHash::Sha256(*exp_digest)
+                        NixHash::Sha256(*exp_nar_sha256)
                     )
                 } else {
                     write!(f, "Tarball [url: {}, exp_hash: None]", url)
                 }
             }
+            Fetch::NAR { url, hash } => {
+                let url = redact_url(url);
+                write!(f, "NAR [url: {}, hash: {}]", &url, hash)
+            }
+            Fetch::Executable { url, hash } => {
+                let url = redact_url(url);
+                write!(f, "Executable [url: {}, hash: {}]", &url, hash)
+            }
             Fetch::Git() => todo!(),
         }
     }
@@ -95,9 +141,28 @@ impl Fetch {
         name: &'a str,
     ) -> Result<Option<StorePathRef<'a>>, BuildStorePathError> {
         let ca_hash = match self {
-            Fetch::URL(_, Some(nixhash)) => CAHash::Flat(nixhash.clone()),
-            Fetch::Tarball(_, Some(nar_sha256)) => CAHash::Nar(NixHash::Sha256(*nar_sha256)),
-            _ => return Ok(None),
+            Fetch::URL {
+                exp_hash: Some(exp_hash),
+                ..
+            } => CAHash::Flat(exp_hash.clone()),
+
+            Fetch::Tarball {
+                exp_nar_sha256: Some(exp_nar_sha256),
+                ..
+            } => CAHash::Nar(NixHash::Sha256(*exp_nar_sha256)),
+
+            Fetch::NAR { hash, .. } | Fetch::Executable { hash, .. } => {
+                CAHash::Nar(hash.to_owned())
+            }
+
+            Fetch::Git() => todo!(),
+
+            // everything else
+            Fetch::URL { exp_hash: None, .. }
+            | Fetch::Tarball {
+                exp_nar_sha256: None,
+                ..
+            } => return Ok(None),
         };
 
         // calculate the store path of this fetch
@@ -132,7 +197,18 @@ impl<BS, DS, PS, NS> Fetcher<BS, DS, PS, NS> {
 
     /// Constructs an HTTP request to the passed URL, and returns an AsyncBufRead to it.
     /// In case the URI uses the file:// scheme, use tokio::fs to open it.
-    async fn download(&self, url: Url) -> Result<Box<dyn AsyncBufRead + Unpin>, FetcherError> {
+    #[instrument(skip_all, fields(url, indicatif.pb_show=1), err)]
+    async fn download(
+        &self,
+        url: Url,
+    ) -> Result<Box<dyn AsyncBufRead + Unpin + Send>, FetcherError> {
+        let span = Span::current();
+        span.pb_set_message(&format!(
+            "📡Fetching {}",
+            // TODO: maybe shorten
+            redact_url(&url)
+        ));
+
         match url.scheme() {
             "file" => {
                 let f = tokio::fs::File::open(url.to_file_path().map_err(|_| {
@@ -144,16 +220,38 @@ impl<BS, DS, PS, NS> Fetcher<BS, DS, PS, NS> {
                     ))
                 })?)
                 .await?;
-                Ok(Box::new(tokio::io::BufReader::new(f)))
+
+                span.pb_set_length(f.metadata().await?.len());
+                span.pb_set_style(&tvix_tracing::PB_TRANSFER_STYLE);
+                span.pb_start();
+                Ok(Box::new(tokio::io::BufReader::new(InspectReader::new(
+                    f,
+                    move |d| {
+                        span.pb_inc(d.len() as u64);
+                    },
+                ))))
             }
             _ => {
                 let resp = self.http_client.get(url).send().await?;
+
+                if let Some(content_length) = resp.content_length() {
+                    span.pb_set_length(content_length);
+                    span.pb_set_style(&tvix_tracing::PB_TRANSFER_STYLE);
+                } else {
+                    span.pb_set_style(&tvix_tracing::PB_TRANSFER_STYLE);
+                }
+                span.pb_start();
+
                 Ok(Box::new(tokio_util::io::StreamReader::new(
-                    resp.bytes_stream().map_err(|e| {
-                        let e = e.without_url();
-                        warn!(%e, "failed to get response body");
-                        std::io::Error::new(std::io::ErrorKind::BrokenPipe, e)
-                    }),
+                    resp.bytes_stream()
+                        .inspect_ok(move |d| {
+                            span.pb_inc(d.len() as u64);
+                        })
+                        .map_err(|e| {
+                            let e = e.without_url();
+                            warn!(%e, "failed to get response body");
+                            std::io::Error::new(std::io::ErrorKind::BrokenPipe, e)
+                        }),
                 )))
             }
         }
@@ -190,7 +288,7 @@ where
     /// didn't match the previously communicated hash contained inside the FetchArgs.
     pub async fn ingest(&self, fetch: Fetch) -> Result<(Node, CAHash, u64), FetcherError> {
         match fetch {
-            Fetch::URL(url, exp_hash) => {
+            Fetch::URL { url, exp_hash } => {
                 // Construct an AsyncRead reading from the data as it's downloaded.
                 let mut r = self.download(url.clone()).await?;
 
@@ -199,7 +297,7 @@ where
 
                 // Copy the contents from the download reader to the blob writer.
                 // Calculate the digest of the file received, depending on the
-                // communicated expected hash (or sha256 if none provided).
+                // communicated expected hash algo (or sha256 if none provided).
                 let (actual_hash, blob_size) = match exp_hash
                     .as_ref()
                     .map(NixHash::algo)
@@ -243,7 +341,10 @@ where
                     blob_size,
                 ))
             }
-            Fetch::Tarball(url, exp_nar_sha256) => {
+            Fetch::Tarball {
+                url,
+                exp_nar_sha256,
+            } => {
                 // Construct an AsyncRead reading from the data as it's downloaded.
                 let r = self.download(url.clone()).await?;
 
@@ -252,7 +353,7 @@ where
                 // Open the archive.
                 let archive = tokio_tar::Archive::new(r);
 
-                // Ingest the archive, get the root node
+                // Ingest the archive, get the root node.
                 let node = tvix_castore::import::archive::ingest_archive(
                     self.blob_service.clone(),
                     self.directory_service.clone(),
@@ -263,7 +364,7 @@ where
                 // If an expected NAR sha256 was provided, compare with the one
                 // calculated from our root node.
                 // Even if no expected NAR sha256 has been provided, we need
-                // the actual one later.
+                // the actual one to calculate the store path.
                 let (nar_size, actual_nar_sha256) = self
                     .nar_calculation_service
                     .calculate_nar(&node)
@@ -289,6 +390,156 @@ where
                     nar_size,
                 ))
             }
+            Fetch::NAR {
+                url,
+                hash: exp_hash,
+            } => {
+                // Construct an AsyncRead reading from the data as it's downloaded.
+                let r = self.download(url.clone()).await?;
+
+                // Strip any compression off the download.
+                let r = DecompressedReader::new(r);
+
+                // Wrap the reader, calculating our own hash.
+                let mut hasher: Box<dyn DynDigest + Send> = match exp_hash.algo() {
+                    HashAlgo::Md5 => Box::new(Md5::new()),
+                    HashAlgo::Sha1 => Box::new(Sha1::new()),
+                    HashAlgo::Sha256 => Box::new(Sha256::new()),
+                    HashAlgo::Sha512 => Box::new(Sha512::new()),
+                };
+                let mut r = tokio_util::io::InspectReader::new(r, |b| {
+                    hasher.update(b);
+                });
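+                // (InspectReader invokes the closure on every chunk read, so
+                // the expected hash is computed over the decompressed NAR
+                // bytes as they stream through, without a second pass.)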
+
+                // Ingest the NAR, get the root node.
+                let (root_node, _actual_nar_sha256, actual_nar_size) =
+                    tvix_store::nar::ingest_nar_and_hash(
+                        self.blob_service.clone(),
+                        self.directory_service.clone(),
+                        &mut r,
+                    )
+                    .await
+                    .map_err(|e| FetcherError::Io(std::io::Error::other(e.to_string())))?;
+
+                // finalize the hasher.
+                let actual_hash = {
+                    match exp_hash.algo() {
+                        HashAlgo::Md5 => {
+                            NixHash::Md5(hasher.finalize().to_vec().try_into().unwrap())
+                        }
+                        HashAlgo::Sha1 => {
+                            NixHash::Sha1(hasher.finalize().to_vec().try_into().unwrap())
+                        }
+                        HashAlgo::Sha256 => {
+                            NixHash::Sha256(hasher.finalize().to_vec().try_into().unwrap())
+                        }
+                        HashAlgo::Sha512 => {
+                            NixHash::Sha512(hasher.finalize().to_vec().try_into().unwrap())
+                        }
+                    }
+                };
+
+                // Ensure the hash matches.
+                if exp_hash != actual_hash {
+                    return Err(FetcherError::HashMismatch {
+                        url,
+                        wanted: exp_hash,
+                        got: actual_hash,
+                    });
+                }
+                Ok((
+                    root_node,
+                    // use a CAHash::Nar with the algo from the input.
+                    CAHash::Nar(exp_hash),
+                    actual_nar_size,
+                ))
+            }
+            Fetch::Executable {
+                url,
+                hash: exp_hash,
+            } => {
+                // Construct an AsyncRead reading from the data as it's downloaded.
+                let mut r = self.download(url.clone()).await?;
+
+                // Construct an AsyncWrite to write into the BlobService.
+                let mut blob_writer = self.blob_service.open_write().await;
+
+                // Copy the contents from the download reader to the blob writer.
+                let file_size = tokio::io::copy(&mut r, &mut blob_writer).await?;
+                let blob_digest = blob_writer.close().await?;
+
+                // Render the NAR representation on-the-fly into a hash function with
+                // the same algo as our expected hash.
+                // We cannot do this upfront, as we don't know the actual size.
+                // FUTUREWORK: make opportunistic use of Content-Length header?
+
+                let w = tokio::io::sink();
+                // Construct the hash function.
+                let mut hasher: Box<dyn DynDigest + Send> = match exp_hash.algo() {
+                    HashAlgo::Md5 => Box::new(Md5::new()),
+                    HashAlgo::Sha1 => Box::new(Sha1::new()),
+                    HashAlgo::Sha256 => Box::new(Sha256::new()),
+                    HashAlgo::Sha512 => Box::new(Sha512::new()),
+                };
+
+                let mut nar_size: u64 = 0;
+                let mut w = InspectWriter::new(w, |d| {
+                    hasher.update(d);
+                    nar_size += d.len() as u64;
+                });
+
+                {
+                    let node = nix_compat::nar::writer::r#async::open(&mut w).await?;
+
+                    let blob_reader = self
+                        .blob_service
+                        .open_read(&blob_digest)
+                        .await?
+                        .expect("Tvix bug: just-uploaded blob not found");
+
+                    node.file(true, file_size, &mut BufReader::new(blob_reader))
+                        .await?;
+
+                    w.flush().await?;
+                }
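+                // (The NAR rendered above wraps the blob as a single executable
+                // file at the root; the user-supplied hash covers this NAR
+                // serialization, not the raw file contents.)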
+
+                // finalize the hasher.
+                let actual_hash = {
+                    match exp_hash.algo() {
+                        HashAlgo::Md5 => {
+                            NixHash::Md5(hasher.finalize().to_vec().try_into().unwrap())
+                        }
+                        HashAlgo::Sha1 => {
+                            NixHash::Sha1(hasher.finalize().to_vec().try_into().unwrap())
+                        }
+                        HashAlgo::Sha256 => {
+                            NixHash::Sha256(hasher.finalize().to_vec().try_into().unwrap())
+                        }
+                        HashAlgo::Sha512 => {
+                            NixHash::Sha512(hasher.finalize().to_vec().try_into().unwrap())
+                        }
+                    }
+                };
+
+                if exp_hash != actual_hash {
+                    return Err(FetcherError::HashMismatch {
+                        url,
+                        wanted: exp_hash,
+                        got: actual_hash,
+                    });
+                }
+
+                // Construct and return the FileNode describing the downloaded contents,
+                // marked as executable.
+                let root_node = Node::File(FileNode {
+                    name: vec![].into(),
+                    digest: blob_digest.into(),
+                    size: file_size,
+                    executable: true,
+                });
+
+                Ok((root_node, CAHash::Nar(actual_hash), file_size))
+            }
             Fetch::Git() => todo!(),
         }
     }
@@ -306,7 +557,7 @@ where
         // Fetch file, return the (unnamed) (File)Node of its contents, ca hash and filesize.
         let (node, ca_hash, size) = self.ingest(fetch).await?;
 
-        // Calculate the store path to return later, which is done with the ca_hash.
+        // Calculate the store path to return, by calculating from ca_hash.
         let store_path = build_ca_path(name, &ca_hash, Vec::<String>::new(), false)?;
 
         // Rename the node name to match the Store Path.
@@ -315,14 +566,15 @@ where
         // If the resulting hash is not a CAHash::Nar, we also need to invoke
         // `calculate_nar` to calculate this representation, as it's required in
         // the [PathInfo].
+        // FUTUREWORK: allow ingest() to return multiple hashes, or have it feed
+        // nar_calculation_service too?
         let (nar_size, nar_sha256) = match &ca_hash {
-            CAHash::Flat(_nix_hash) => self
+            CAHash::Nar(NixHash::Sha256(nar_sha256)) => (size, *nar_sha256),
+            CAHash::Nar(_) | CAHash::Flat(_) => self
                 .nar_calculation_service
                 .calculate_nar(&node)
                 .await
                 .map_err(|e| FetcherError::Io(e.into()))?,
-            CAHash::Nar(NixHash::Sha256(nar_sha256)) => (size, *nar_sha256),
-            CAHash::Nar(_) => unreachable!("Tvix bug: fetch returned non-sha256 CAHash::Nar"),
             CAHash::Text(_) => unreachable!("Tvix bug: fetch returned CAHash::Text"),
         };
 
@@ -382,38 +634,82 @@ pub(crate) fn url_basename(s: &str) -> &str {
 #[cfg(test)]
 mod tests {
     mod fetch {
-        use nix_compat::nixbase32;
-
-        use crate::fetchers::Fetch;
-
         use super::super::*;
-
-        #[test]
-        fn fetchurl_store_path() {
-            let url = Url::parse("https://raw.githubusercontent.com/aaptel/notmuch-extract-patch/f732a53e12a7c91a06755ebfab2007adc9b3063b/notmuch-extract-patch").unwrap();
-            let exp_hash = NixHash::Sha256(
-                nixbase32::decode_fixed("0nawkl04sj7psw6ikzay7kydj3dhd0fkwghcsf5rzaw4bmp4kbax")
-                    .unwrap(),
-            );
-
-            let fetch = Fetch::URL(url, Some(exp_hash));
+        use crate::fetchers::Fetch;
+        use nix_compat::{nixbase32, nixhash};
+        use rstest::rstest;
+
+        #[rstest]
+        #[case::url_no_hash(
+            Fetch::URL{
+                url: Url::parse("https://raw.githubusercontent.com/aaptel/notmuch-extract-patch/f732a53e12a7c91a06755ebfab2007adc9b3063b/notmuch-extract-patch").unwrap(),
+                exp_hash: None,
+            },
+            None,
+            "notmuch-extract-patch"
+        )]
+        #[case::url_sha256(
+            Fetch::URL{
+                url: Url::parse("https://raw.githubusercontent.com/aaptel/notmuch-extract-patch/f732a53e12a7c91a06755ebfab2007adc9b3063b/notmuch-extract-patch").unwrap(),
+                exp_hash: Some(nixhash::from_sri_str("sha256-Xa1Jbl2Eq5+L0ww+Ph1osA3Z/Dxe/RkN1/dITQCdXFk=").unwrap()),
+            },
+            Some(StorePathRef::from_bytes(b"06qi00hylriyfm0nl827crgjvbax84mz-notmuch-extract-patch").unwrap()),
+            "notmuch-extract-patch"
+        )]
+        #[case::url_custom_name(
+            Fetch::URL{
+                url: Url::parse("https://test.example/owo").unwrap(),
+                exp_hash: Some(nixhash::from_sri_str("sha256-Xa1Jbl2Eq5+L0ww+Ph1osA3Z/Dxe/RkN1/dITQCdXFk=").unwrap()),
+            },
+            Some(StorePathRef::from_bytes(b"06qi00hylriyfm0nl827crgjvbax84mz-notmuch-extract-patch").unwrap()),
+            "notmuch-extract-patch"
+        )]
+        #[case::nar_sha256(
+            Fetch::NAR{
+                url: Url::parse("https://cache.nixos.org/nar/0r8nqa1klm5v17ifc6z96m9wywxkjvgbnqq9pmy0sgqj53wj3n12.nar.xz").unwrap(),
+                hash: nixhash::from_sri_str("sha256-oj6yfWKbcEerK8D9GdPJtIAOveNcsH1ztGeSARGypRA=").unwrap(),
+            },
+            Some(StorePathRef::from_bytes(b"b40vjphshq4fdgv8s3yrp0bdlafi4920-0r8nqa1klm5v17ifc6z96m9wywxkjvgbnqq9pmy0sgqj53wj3n12.nar.xz").unwrap()),
+            "0r8nqa1klm5v17ifc6z96m9wywxkjvgbnqq9pmy0sgqj53wj3n12.nar.xz"
+        )]
+        #[case::nar_sha1(
+            Fetch::NAR{
+                url: Url::parse("https://cache.nixos.org/nar/0r8nqa1klm5v17ifc6z96m9wywxkjvgbnqq9pmy0sgqj53wj3n12.nar.xz").unwrap(),
+                hash: nixhash::from_sri_str("sha1-F/fMsgwkXF8fPCg1v9zPZ4yOFIA=").unwrap(),
+            },
+            Some(StorePathRef::from_bytes(b"8kx7fdkdbzs4fkfb57xq0cbhs20ymq2n-0r8nqa1klm5v17ifc6z96m9wywxkjvgbnqq9pmy0sgqj53wj3n12.nar.xz").unwrap()),
+            "0r8nqa1klm5v17ifc6z96m9wywxkjvgbnqq9pmy0sgqj53wj3n12.nar.xz"
+        )]
+        #[case::executable_sha1(
+            Fetch::Executable{
+                url: Url::parse("https://cache.nixos.org/nar/0r8nqa1klm5v17ifc6z96m9wywxkjvgbnqq9pmy0sgqj53wj3n12.nar.xz").unwrap(),
+                hash: nixhash::from_sri_str("sha1-NKNeU1csW5YJ4lCeWH3Z/apppNU=").unwrap(),
+            },
+            Some(StorePathRef::from_bytes(b"y92hm2xfk1009hrq0ix80j4m5k4j4w21-0r8nqa1klm5v17ifc6z96m9wywxkjvgbnqq9pmy0sgqj53wj3n12.nar.xz").unwrap()),
+            "0r8nqa1klm5v17ifc6z96m9wywxkjvgbnqq9pmy0sgqj53wj3n12.nar.xz"
+        )]
+        fn fetch_store_path(
+            #[case] fetch: Fetch,
+            #[case] exp_path: Option<StorePathRef>,
+            #[case] name: &str,
+        ) {
             assert_eq!(
-                "06qi00hylriyfm0nl827crgjvbax84mz-notmuch-extract-patch",
-                &fetch
-                    .store_path("notmuch-extract-patch")
-                    .unwrap()
-                    .unwrap()
-                    .to_string(),
-            )
+                exp_path,
+                fetch.store_path(name).expect("invalid name"),
+                "unexpected calculated store path"
+            );
         }
 
         #[test]
         fn fetch_tarball_store_path() {
             let url = Url::parse("https://github.com/NixOS/nixpkgs/archive/91050ea1e57e50388fa87a3302ba12d188ef723a.tar.gz").unwrap();
-            let exp_nixbase32 =
+            let exp_sha256 =
                 nixbase32::decode_fixed("1hf6cgaci1n186kkkjq106ryf8mmlq9vnwgfwh625wa8hfgdn4dm")
                     .unwrap();
-            let fetch = Fetch::Tarball(url, Some(exp_nixbase32));
+            let fetch = Fetch::Tarball {
+                url,
+                exp_nar_sha256: Some(exp_sha256),
+            };
 
             assert_eq!(
                 "7adgvk5zdfq4pwrhsm3n9lzypb12gw0g-source",
diff --git a/tvix/glue/src/fetchurl.rs b/tvix/glue/src/fetchurl.rs
new file mode 100644
index 0000000000..9f57868b19
--- /dev/null
+++ b/tvix/glue/src/fetchurl.rs
@@ -0,0 +1,82 @@
+//! This contains the code translating from a `builtin:fetchurl` [Derivation]
+//! to a [Fetch].
+use crate::fetchers::Fetch;
+use nix_compat::{derivation::Derivation, nixhash::CAHash};
+use tracing::instrument;
+use url::Url;
+
+/// Takes a derivation produced by a call to `builtin:fetchurl` and returns the
+/// synthesized [Fetch] for it, as well as the name.
+#[instrument]
+pub(crate) fn fetchurl_derivation_to_fetch(drv: &Derivation) -> Result<(String, Fetch), Error> {
+    if drv.builder != "builtin:fetchurl" {
+        return Err(Error::BuilderInvalid);
+    }
+    if !drv.arguments.is_empty() {
+        return Err(Error::ArgumentsInvalid);
+    }
+    if drv.system != "builtin" {
+        return Err(Error::SystemInvalid);
+    }
+
+    // ensure this is a fixed-output derivation
+    if drv.outputs.len() != 1 {
+        return Err(Error::NoFOD);
+    }
+    let out_output = drv.outputs.get("out").ok_or(Error::NoFOD)?;
+    let ca_hash = out_output.ca_hash.clone().ok_or(Error::NoFOD)?;
+
+    let name: String = drv
+        .environment
+        .get("name")
+        .ok_or(Error::NameMissing)?
+        .to_owned()
+        .try_into()
+        .map_err(|_| Error::NameInvalid)?;
+
+    let url: Url = std::str::from_utf8(drv.environment.get("url").ok_or(Error::URLMissing)?)
+        .map_err(|_| Error::URLInvalid)?
+        .parse()
+        .map_err(|_| Error::URLInvalid)?;
+
+    match ca_hash {
+        CAHash::Flat(hash) => Ok((
+            name,
+            Fetch::URL {
+                url,
+                exp_hash: Some(hash),
+            },
+        )),
+        CAHash::Nar(hash) => {
+            if drv.environment.get("executable").map(|v| v.as_slice()) == Some(b"1") {
+                Ok((name, Fetch::Executable { url, hash }))
+            } else {
+                Ok((name, Fetch::NAR { url, hash }))
+            }
+        }
+        // you can't construct derivations containing this
+        CAHash::Text(_) => panic!("Tvix bug: got CAHash::Text in drv"),
+    }
+}
+
+#[derive(Debug, thiserror::Error)]
+pub(crate) enum Error {
+    #[error("Invalid builder")]
+    BuilderInvalid,
+    #[error("invalid arguments")]
+    ArgumentsInvalud,
+    #[error("Invalid system")]
+    SystemInvalid,
+    #[error("Derivation is not fixed-output")]
+    NoFOD,
+    #[error("Missing URL")]
+    URLMissing,
+    #[error("Invalid URL")]
+    URLInvalid,
+    #[error("Missing Name")]
+    NameMissing,
+    #[error("Name invalid")]
+    NameInvalid,
+}
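+
+// For illustration (values hypothetical): a derivation with
+// builder = "builtin:fetchurl", system = "builtin", empty arguments,
+// environment entries name = "foo" and url = "https://example.com/foo",
+// and a single "out" output carrying CAHash::Flat(<sha256>) translates to
+// ("foo", Fetch::URL { url, exp_hash: Some(<sha256>) }).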
diff --git a/tvix/glue/src/known_paths.rs b/tvix/glue/src/known_paths.rs
index 290c9d5b69..edc57c38f2 100644
--- a/tvix/glue/src/known_paths.rs
+++ b/tvix/glue/src/known_paths.rs
@@ -133,7 +133,7 @@ impl KnownPaths {
 
 #[cfg(test)]
 mod tests {
-    use nix_compat::{derivation::Derivation, nixbase32, nixhash::NixHash, store_path::StorePath};
+    use nix_compat::{derivation::Derivation, nixbase32, nixhash, store_path::StorePath};
     use url::Url;
 
     use crate::fetchers::Fetch;
@@ -160,16 +160,16 @@ mod tests {
         static ref FOO_OUT_PATH: StorePath =
             StorePath::from_bytes(b"fhaj6gmwns62s6ypkcldbaj2ybvkhx3p-foo").expect("must parse");
 
-        static ref FETCH_URL : Fetch = Fetch::URL(
-            Url::parse("https://raw.githubusercontent.com/aaptel/notmuch-extract-patch/f732a53e12a7c91a06755ebfab2007adc9b3063b/notmuch-extract-patch").unwrap(),
-            Some(NixHash::Sha256(nixbase32::decode_fixed("0nawkl04sj7psw6ikzay7kydj3dhd0fkwghcsf5rzaw4bmp4kbax").unwrap()))
-        );
+        static ref FETCH_URL : Fetch = Fetch::URL{
+            url: Url::parse("https://raw.githubusercontent.com/aaptel/notmuch-extract-patch/f732a53e12a7c91a06755ebfab2007adc9b3063b/notmuch-extract-patch").unwrap(),
+            exp_hash: Some(nixhash::from_sri_str("sha256-Xa1Jbl2Eq5+L0ww+Ph1osA3Z/Dxe/RkN1/dITQCdXFk=").unwrap())
+        };
         static ref FETCH_URL_OUT_PATH: StorePath = StorePath::from_bytes(b"06qi00hylriyfm0nl827crgjvbax84mz-notmuch-extract-patch").unwrap();
 
-        static ref FETCH_TARBALL : Fetch = Fetch::Tarball(
-            Url::parse("https://github.com/NixOS/nixpkgs/archive/91050ea1e57e50388fa87a3302ba12d188ef723a.tar.gz").unwrap(),
-            Some(nixbase32::decode_fixed("1hf6cgaci1n186kkkjq106ryf8mmlq9vnwgfwh625wa8hfgdn4dm").unwrap())
-        );
+        static ref FETCH_TARBALL : Fetch = Fetch::Tarball{
+            url: Url::parse("https://github.com/NixOS/nixpkgs/archive/91050ea1e57e50388fa87a3302ba12d188ef723a.tar.gz").unwrap(),
+            exp_nar_sha256: Some(nixbase32::decode_fixed("1hf6cgaci1n186kkkjq106ryf8mmlq9vnwgfwh625wa8hfgdn4dm").unwrap())
+        };
         static ref FETCH_TARBALL_OUT_PATH: StorePath = StorePath::from_bytes(b"7adgvk5zdfq4pwrhsm3n9lzypb12gw0g-source").unwrap();
     }
 
@@ -267,22 +267,6 @@ mod tests {
                 .unwrap()
                 .to_owned()
         );
-
-        // We should be able to get these fetches out, when asking for their out path.
-        let (got_name, got_fetch) = known_paths
-            .get_fetch_for_output_path(&FETCH_URL_OUT_PATH)
-            .expect("must be some");
-
-        assert_eq!("notmuch-extract-patch", got_name);
-        assert_eq!(FETCH_URL.clone(), got_fetch);
-
-        // … multiple times.
-        let (got_name, got_fetch) = known_paths
-            .get_fetch_for_output_path(&FETCH_URL_OUT_PATH)
-            .expect("must be some");
-
-        assert_eq!("notmuch-extract-patch", got_name);
-        assert_eq!(FETCH_URL.clone(), got_fetch);
     }
 
     // TODO: add test panicking about missing digest
diff --git a/tvix/glue/src/lib.rs b/tvix/glue/src/lib.rs
index 2e5a3be103..a5dbdb8742 100644
--- a/tvix/glue/src/lib.rs
+++ b/tvix/glue/src/lib.rs
@@ -6,6 +6,8 @@ pub mod tvix_build;
 pub mod tvix_io;
 pub mod tvix_store_io;
 
+mod fetchurl;
+
 #[cfg(test)]
 mod tests;
 
diff --git a/tvix/glue/src/tests/empty-file b/tvix/glue/src/tests/empty-file
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/tvix/glue/src/tests/empty-file
diff --git a/tvix/glue/src/tests/tvix_tests/eval-okay-context-introspection.nix b/tvix/glue/src/tests/tvix_tests/eval-okay-context-introspection.nix
index ecd8ab0073..e5719e00c3 100644
--- a/tvix/glue/src/tests/tvix_tests/eval-okay-context-introspection.nix
+++ b/tvix/glue/src/tests/tvix_tests/eval-okay-context-introspection.nix
@@ -71,7 +71,7 @@ in
   (builtins.hasAttr "allOutputs" (builtins.getContext drv.drvPath)."${builtins.unsafeDiscardStringContext drv.drvPath}")
   (legit-context == desired-context) # FIXME(raitobezarius): this should not use `builtins.seq`, this is a consequence of excessive laziness of Tvix, I believe.
   (reconstructed-path == combo-path)
-  # Those are too slow?
+  # These still fail with an internal error
   # (etaRule' "foo")
   # (etaRule' combo-path)
   (etaRule "foo")
diff --git a/tvix/glue/src/tests/tvix_tests/eval-okay-fetchurl.exp b/tvix/glue/src/tests/tvix_tests/eval-okay-fetchurl.exp
index 37a04d577c..50d8e3574b 100644
--- a/tvix/glue/src/tests/tvix_tests/eval-okay-fetchurl.exp
+++ b/tvix/glue/src/tests/tvix_tests/eval-okay-fetchurl.exp
@@ -1 +1 @@
-[ /nix/store/y0r1p1cqmlvm0yqkz3gxvkc1p8kg2sz8-null /nix/store/06qi00hylriyfm0nl827crgjvbax84mz-notmuch-extract-patch /nix/store/06qi00hylriyfm0nl827crgjvbax84mz-notmuch-extract-patch /nix/store/06qi00hylriyfm0nl827crgjvbax84mz-notmuch-extract-patch ]
+[ /nix/store/y0r1p1cqmlvm0yqkz3gxvkc1p8kg2sz8-null /nix/store/06qi00hylriyfm0nl827crgjvbax84mz-notmuch-extract-patch /nix/store/06qi00hylriyfm0nl827crgjvbax84mz-notmuch-extract-patch /nix/store/06qi00hylriyfm0nl827crgjvbax84mz-notmuch-extract-patch "/nix/store/06qi00hylriyfm0nl827crgjvbax84mz-notmuch-extract-patch" "/nix/store/b40vjphshq4fdgv8s3yrp0bdlafi4920-0r8nqa1klm5v17ifc6z96m9wywxkjvgbnqq9pmy0sgqj53wj3n12.nar.xz" "/nix/store/8kx7fdkdbzs4fkfb57xq0cbhs20ymq2n-0r8nqa1klm5v17ifc6z96m9wywxkjvgbnqq9pmy0sgqj53wj3n12.nar.xz" "/nix/store/y92hm2xfk1009hrq0ix80j4m5k4j4w21-0r8nqa1klm5v17ifc6z96m9wywxkjvgbnqq9pmy0sgqj53wj3n12.nar.xz" ]
diff --git a/tvix/glue/src/tests/tvix_tests/eval-okay-fetchurl.nix b/tvix/glue/src/tests/tvix_tests/eval-okay-fetchurl.nix
index 8a39101525..a3e9c54869 100644
--- a/tvix/glue/src/tests/tvix_tests/eval-okay-fetchurl.nix
+++ b/tvix/glue/src/tests/tvix_tests/eval-okay-fetchurl.nix
@@ -22,4 +22,44 @@
     name = "notmuch-extract-patch";
     sha256 = "sha256-Xa1Jbl2Eq5+L0ww+Ph1osA3Z/Dxe/RkN1/dITQCdXFk=";
   })
+
+  # The following tests use <nix/fetchurl.nix>.
+  # This is a piece of Nix code producing a "fake derivation" which gets
+  # handled by a "custom builder" that does the actual fetching.
+  # We access `.outPath` here, as the current string output of a Derivation
+  # still differs from the way Nix presents it.
+  # It behaves similarly to builtins.fetchurl, except it requires a hash to be
+  # provided upfront.
+  # If `unpack` is set to true, it can unpack NAR files (decompressing if
+  # necessary).
+  # If `executable` is set to true, it will place the fetched file at the root
+  # and make it executable; the hash then covers the NAR representation.
+
+  # Fetch a URL.
+  (import <nix/fetchurl.nix> {
+    url = "https://test.example/owo";
+    name = "notmuch-extract-patch";
+    sha256 = "Xa1Jbl2Eq5+L0ww+Ph1osA3Z/Dxe/RkN1/dITQCdXFk=";
+  }).outPath
+
+  # Fetch a NAR and unpack it, specifying the sha256 of its NAR representation.
+  (import <nix/fetchurl.nix> {
+    url = "https://cache.nixos.org/nar/0r8nqa1klm5v17ifc6z96m9wywxkjvgbnqq9pmy0sgqj53wj3n12.nar.xz";
+    sha256 = "sha256-oj6yfWKbcEerK8D9GdPJtIAOveNcsH1ztGeSARGypRA=";
+    unpack = true;
+  }).outPath
+
+  # Fetch a NAR and unpack it, specifying the *sha1* of its NAR representation.
+  (import <nix/fetchurl.nix> {
+    url = "https://cache.nixos.org/nar/0r8nqa1klm5v17ifc6z96m9wywxkjvgbnqq9pmy0sgqj53wj3n12.nar.xz";
+    hash = "sha1-F/fMsgwkXF8fPCg1v9zPZ4yOFIA=";
+    unpack = true;
+  }).outPath
+
+  # Fetch a URL, specifying the *sha1* of a NAR describing it as executable at the root.
+  (import <nix/fetchurl.nix> {
+    url = "https://cache.nixos.org/nar/0r8nqa1klm5v17ifc6z96m9wywxkjvgbnqq9pmy0sgqj53wj3n12.nar.xz";
+    hash = "sha1-NKNeU1csW5YJ4lCeWH3Z/apppNU=";
+    executable = true;
+  }).outPath
 ]
diff --git a/tvix/glue/src/tests/tvix_tests/eval-okay-storePath.exp b/tvix/glue/src/tests/tvix_tests/eval-okay-storePath.exp
new file mode 100644
index 0000000000..e7d20f6631
--- /dev/null
+++ b/tvix/glue/src/tests/tvix_tests/eval-okay-storePath.exp
@@ -0,0 +1 @@
+{ contextMatches = true; hasContext = true; }
diff --git a/tvix/glue/src/tests/tvix_tests/eval-okay-storePath.nix b/tvix/glue/src/tests/tvix_tests/eval-okay-storePath.nix
new file mode 100644
index 0000000000..99205cb9e0
--- /dev/null
+++ b/tvix/glue/src/tests/tvix_tests/eval-okay-storePath.nix
@@ -0,0 +1,9 @@
+let
+  path = builtins.unsafeDiscardStringContext "${../empty-file}";
+  storePath = builtins.storePath path;
+  context = builtins.getContext storePath;
+in
+{
+  hasContext = builtins.hasContext storePath;
+  contextMatches = context == { "${path}" = { path = true; }; };
+}
diff --git a/tvix/glue/src/tests/tvix_tests/eval-okay-toxml-context.exp b/tvix/glue/src/tests/tvix_tests/eval-okay-toxml-context.exp
new file mode 100644
index 0000000000..e9600ecdad
--- /dev/null
+++ b/tvix/glue/src/tests/tvix_tests/eval-okay-toxml-context.exp
@@ -0,0 +1 @@
+[ { "/nix/store/y1s2fiq89v2h9vkb38w508ir20dwv6v2-test.drv" = { allOutputs = true; }; } false ]
diff --git a/tvix/glue/src/tests/tvix_tests/eval-okay-toxml-context.nix b/tvix/glue/src/tests/tvix_tests/eval-okay-toxml-context.nix
new file mode 100644
index 0000000000..933aa46022
--- /dev/null
+++ b/tvix/glue/src/tests/tvix_tests/eval-okay-toxml-context.nix
@@ -0,0 +1,14 @@
+[
+  # builtins.toXML retains context where there is some.
+  (builtins.getContext (builtins.toXML {
+    inherit (derivation {
+      name = "test";
+      builder = "/bin/sh";
+      system = builtins.currentSystem;
+    }) drvPath;
+  }))
+
+  # this should have no context.
+  (builtins.hasContext
+    (builtins.toXML { }))
+]
diff --git a/tvix/glue/src/tvix_store_io.rs b/tvix/glue/src/tvix_store_io.rs
index 7b8ef3ff0a..489f020436 100644
--- a/tvix/glue/src/tvix_store_io.rs
+++ b/tvix/glue/src/tvix_store_io.rs
@@ -1,5 +1,4 @@
 //! This module provides an implementation of EvalIO talking to tvix-store.
-
 use bytes::Bytes;
 use futures::{StreamExt, TryStreamExt};
 use nix_compat::nixhash::NixHash;
@@ -14,7 +13,8 @@ use std::{
     sync::Arc,
 };
 use tokio_util::io::SyncIoBridge;
-use tracing::{error, info, instrument, warn, Level};
+use tracing::{error, instrument, warn, Level, Span};
+use tracing_indicatif::span_ext::IndicatifSpanExt;
 use tvix_build::buildservice::BuildService;
 use tvix_castore::proto::node::Node;
 use tvix_eval::{EvalIO, FileType, StdIO};
@@ -106,7 +106,7 @@ impl TvixStoreIO {
     ///
     /// In case there is no PathInfo yet, this means we need to build it
     /// (which currently is stubbed out still).
-    #[instrument(skip(self, store_path), fields(store_path=%store_path), ret(level = Level::TRACE), err)]
+    #[instrument(skip(self, store_path), fields(store_path=%store_path, indicatif.pb_show=1), ret(level = Level::TRACE), err)]
     async fn store_path_to_node(
         &self,
         store_path: &StorePath,
@@ -138,7 +138,8 @@ impl TvixStoreIO {
                 // The store path doesn't exist yet, so we need to fetch or build it.
                 // We check for fetches first, as we might have both native
                 // fetchers and FODs in KnownPaths, and prefer the former.
-
+                // This will also find [Fetch]es synthesized from
+                // `builtin:fetchurl` Derivations.
                 let maybe_fetch = self
                     .known_paths
                     .borrow()
@@ -146,7 +147,6 @@ impl TvixStoreIO {
 
                 match maybe_fetch {
                     Some((name, fetch)) => {
-                        info!(?fetch, "triggering lazy fetch");
                         let (sp, root_node) = self
                             .fetcher
                             .ingest_and_persist(&name, fetch)
@@ -156,9 +156,9 @@ impl TvixStoreIO {
                         })?;
 
                         debug_assert_eq!(
-                            sp.to_string(),
-                            store_path.to_string(),
-                            "store path returned from fetcher should match"
+                            sp.to_absolute_path(),
+                            store_path.as_ref().to_absolute_path(),
+                            "store path returned from fetcher must match store path we have in fetchers"
                         );
 
                         root_node
@@ -179,14 +179,16 @@ impl TvixStoreIO {
                                 }
                             }
                         };
-
-                        warn!("triggering build");
+                        let span = Span::current();
+                        span.pb_start();
+                        span.pb_set_style(&tvix_tracing::PB_SPINNER_STYLE);
+                        span.pb_set_message(&format!("⏳Waiting for inputs {}", &store_path));
 
                         // derivation_to_build_request needs castore nodes for all inputs.
                         // Provide them, which means, here is where we recursively build
                         // all dependencies.
                         #[allow(clippy::mutable_key_type)]
-                        let input_nodes: BTreeSet<Node> =
+                        let mut input_nodes: BTreeSet<Node> =
                             futures::stream::iter(drv.input_derivations.iter())
                                 .map(|(input_drv_path, output_names)| {
                                     // look up the derivation object
@@ -236,9 +238,34 @@ impl TvixStoreIO {
                                     )
                                 })
                                 .flatten()
-                                .buffer_unordered(10) // TODO: make configurable
+                                .buffer_unordered(
+                                    1, /* TODO: increase again once we prevent redundant fetches */
+                                ) // TODO: make configurable
+                                .try_collect()
+                                .await?;
+
+                        // add input sources
+                        // FUTUREWORK: merge these two things together
+                        #[allow(clippy::mutable_key_type)]
+                        let input_nodes_input_sources: BTreeSet<Node> =
+                            futures::stream::iter(drv.input_sources.iter())
+                                .then(|input_source| {
+                                    Box::pin(async {
+                                        let node = self
+                                            .store_path_to_node(input_source, Path::new(""))
+                                            .await?;
+                                        if let Some(node) = node {
+                                            Ok(node)
+                                        } else {
+                                            Err(io::Error::other("no node produced"))
+                                        }
+                                    })
+                                })
                                 .try_collect()
                                 .await?;
+                        input_nodes.extend(input_nodes_input_sources);
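+                        // (Sketch of the FUTUREWORK above: both lookups could
+                        // be expressed as a single stream, e.g. by chaining the
+                        // input-derivation stream with the input-source stream
+                        // before one try_collect; they are kept separate here
+                        // for clarity.)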
+
+                        span.pb_set_message(&format!("🔨Building {}", &store_path));
 
                         // TODO: check if input sources are sufficiently dealt with,
                         // I think yes, they must be imported into the store by other
@@ -255,7 +282,7 @@ impl TvixStoreIO {
                             .await
                             .map_err(|e| std::io::Error::new(io::ErrorKind::Other, e))?;
 
-                        // TODO: refscan?
+                        // TODO: refscan
 
                         // For each output, insert a PathInfo.
                         for output in &build_result.outputs {
@@ -421,7 +448,7 @@ impl EvalIO for TvixStoreIO {
         {
             if self
                 .tokio_handle
-                .block_on(async { self.store_path_to_node(&store_path, &sub_path).await })?
+                .block_on(self.store_path_to_node(&store_path, &sub_path))?
                 .is_some()
             {
                 Ok(true)
diff --git a/tvix/nar-bridge/.gitignore b/tvix/nar-bridge-go/.gitignore
index d70e1f8120..d70e1f8120 100644
--- a/tvix/nar-bridge/.gitignore
+++ b/tvix/nar-bridge-go/.gitignore
diff --git a/tvix/nar-bridge/README.md b/tvix/nar-bridge-go/README.md
index b14ee7af7b..81431daf38 100644
--- a/tvix/nar-bridge/README.md
+++ b/tvix/nar-bridge-go/README.md
@@ -1,4 +1,4 @@
-# //tvix/nar-bridge
+# //tvix/nar-bridge-go
 
 This exposes an HTTP binary cache interface (GET/HEAD/PUT requests) for a `tvix-
 store`.
diff --git a/tvix/nar-bridge/cmd/nar-bridge-http/main.go b/tvix/nar-bridge-go/cmd/nar-bridge-http/main.go
index 171ea7f5bd..cf2aaf4901 100644
--- a/tvix/nar-bridge/cmd/nar-bridge-http/main.go
+++ b/tvix/nar-bridge-go/cmd/nar-bridge-http/main.go
@@ -14,7 +14,7 @@ import (
 	"google.golang.org/grpc/credentials/insecure"
 
 	castorev1pb "code.tvl.fyi/tvix/castore-go"
-	narBridgeHttp "code.tvl.fyi/tvix/nar-bridge/pkg/http"
+	narBridgeHttp "code.tvl.fyi/tvix/nar-bridge-go/pkg/http"
 	storev1pb "code.tvl.fyi/tvix/store-go"
 	log "github.com/sirupsen/logrus"
 )
@@ -47,7 +47,7 @@ func main() {
 			log.Fatal("failed to read build info")
 		}
 
-		shutdown, err := setupOpenTelemetry(ctx, "nar-bridge", buildInfo.Main.Version)
+		shutdown, err := setupOpenTelemetry(ctx, "nar-bridge-http", buildInfo.Main.Version)
 		if err != nil {
 			log.WithError(err).Fatal("failed to setup OpenTelemetry")
 		}
diff --git a/tvix/nar-bridge/cmd/nar-bridge-http/otel.go b/tvix/nar-bridge-go/cmd/nar-bridge-http/otel.go
index c446c6ec1a..c446c6ec1a 100644
--- a/tvix/nar-bridge/cmd/nar-bridge-http/otel.go
+++ b/tvix/nar-bridge-go/cmd/nar-bridge-http/otel.go
diff --git a/tvix/nar-bridge/default.nix b/tvix/nar-bridge-go/default.nix
index c0247f279f..303d9c5041 100644
--- a/tvix/nar-bridge/default.nix
+++ b/tvix/nar-bridge-go/default.nix
@@ -3,7 +3,7 @@
 { depot, pkgs, lib, ... }:
 
 pkgs.buildGoModule {
-  name = "nar-bridge";
+  name = "nar-bridge-go";
   src = depot.third_party.gitignoreSource ./.;
 
   vendorHash = "sha256-7jugbC5sEGhppjiZgnoLP5A6kQSaHK9vE6cXVZBG22s=";
diff --git a/tvix/nar-bridge/go.mod b/tvix/nar-bridge-go/go.mod
index deb6943e23..3aa0694ff7 100644
--- a/tvix/nar-bridge/go.mod
+++ b/tvix/nar-bridge-go/go.mod
@@ -1,4 +1,4 @@
-module code.tvl.fyi/tvix/nar-bridge
+module code.tvl.fyi/tvix/nar-bridge-go
 
 require (
 	code.tvl.fyi/tvix/castore-go v0.0.0-20231105151352-990d6ba2175e
diff --git a/tvix/nar-bridge/go.sum b/tvix/nar-bridge-go/go.sum
index 39f77b9061..39f77b9061 100644
--- a/tvix/nar-bridge/go.sum
+++ b/tvix/nar-bridge-go/go.sum
diff --git a/tvix/nar-bridge/pkg/http/nar_get.go b/tvix/nar-bridge-go/pkg/http/nar_get.go
index 75797f8da9..75797f8da9 100644
--- a/tvix/nar-bridge/pkg/http/nar_get.go
+++ b/tvix/nar-bridge-go/pkg/http/nar_get.go
diff --git a/tvix/nar-bridge/pkg/http/nar_put.go b/tvix/nar-bridge-go/pkg/http/nar_put.go
index fdfa20f9c3..96bdd38b70 100644
--- a/tvix/nar-bridge/pkg/http/nar_put.go
+++ b/tvix/nar-bridge-go/pkg/http/nar_put.go
@@ -7,7 +7,7 @@ import (
 	"net/http"
 
 	castorev1pb "code.tvl.fyi/tvix/castore-go"
-	"code.tvl.fyi/tvix/nar-bridge/pkg/importer"
+	"code.tvl.fyi/tvix/nar-bridge-go/pkg/importer"
 	"github.com/go-chi/chi/v5"
 	mh "github.com/multiformats/go-multihash/core"
 	nixhash "github.com/nix-community/go-nix/pkg/hash"
diff --git a/tvix/nar-bridge/pkg/http/narinfo.go b/tvix/nar-bridge-go/pkg/http/narinfo.go
index e5b99a9505..e5b99a9505 100644
--- a/tvix/nar-bridge/pkg/http/narinfo.go
+++ b/tvix/nar-bridge-go/pkg/http/narinfo.go
diff --git a/tvix/nar-bridge/pkg/http/narinfo_get.go b/tvix/nar-bridge-go/pkg/http/narinfo_get.go
index d43cb58078..d43cb58078 100644
--- a/tvix/nar-bridge/pkg/http/narinfo_get.go
+++ b/tvix/nar-bridge-go/pkg/http/narinfo_get.go
diff --git a/tvix/nar-bridge/pkg/http/narinfo_put.go b/tvix/nar-bridge-go/pkg/http/narinfo_put.go
index fd588bec86..0e2ae989c0 100644
--- a/tvix/nar-bridge/pkg/http/narinfo_put.go
+++ b/tvix/nar-bridge-go/pkg/http/narinfo_put.go
@@ -3,7 +3,7 @@ package http
 import (
 	"net/http"
 
-	"code.tvl.fyi/tvix/nar-bridge/pkg/importer"
+	"code.tvl.fyi/tvix/nar-bridge-go/pkg/importer"
 	"github.com/go-chi/chi/v5"
 	"github.com/nix-community/go-nix/pkg/narinfo"
 	"github.com/nix-community/go-nix/pkg/nixbase32"
diff --git a/tvix/nar-bridge/pkg/http/server.go b/tvix/nar-bridge-go/pkg/http/server.go
index fbcb20be18..fbcb20be18 100644
--- a/tvix/nar-bridge/pkg/http/server.go
+++ b/tvix/nar-bridge-go/pkg/http/server.go
diff --git a/tvix/nar-bridge/pkg/http/util.go b/tvix/nar-bridge-go/pkg/http/util.go
index 60febea1f4..60febea1f4 100644
--- a/tvix/nar-bridge/pkg/http/util.go
+++ b/tvix/nar-bridge-go/pkg/http/util.go
diff --git a/tvix/nar-bridge/pkg/importer/blob_upload.go b/tvix/nar-bridge-go/pkg/importer/blob_upload.go
index c1255dd3ad..c1255dd3ad 100644
--- a/tvix/nar-bridge/pkg/importer/blob_upload.go
+++ b/tvix/nar-bridge-go/pkg/importer/blob_upload.go
diff --git a/tvix/nar-bridge/pkg/importer/counting_writer.go b/tvix/nar-bridge-go/pkg/importer/counting_writer.go
index d003a4b11b..d003a4b11b 100644
--- a/tvix/nar-bridge/pkg/importer/counting_writer.go
+++ b/tvix/nar-bridge-go/pkg/importer/counting_writer.go
diff --git a/tvix/nar-bridge/pkg/importer/directory_upload.go b/tvix/nar-bridge-go/pkg/importer/directory_upload.go
index 117f442fa5..117f442fa5 100644
--- a/tvix/nar-bridge/pkg/importer/directory_upload.go
+++ b/tvix/nar-bridge-go/pkg/importer/directory_upload.go
diff --git a/tvix/nar-bridge/pkg/importer/gen_pathinfo.go b/tvix/nar-bridge-go/pkg/importer/gen_pathinfo.go
index bdc298a9a3..bdc298a9a3 100644
--- a/tvix/nar-bridge/pkg/importer/gen_pathinfo.go
+++ b/tvix/nar-bridge-go/pkg/importer/gen_pathinfo.go
diff --git a/tvix/nar-bridge/pkg/importer/importer.go b/tvix/nar-bridge-go/pkg/importer/importer.go
index fce6c5f293..fce6c5f293 100644
--- a/tvix/nar-bridge/pkg/importer/importer.go
+++ b/tvix/nar-bridge-go/pkg/importer/importer.go
diff --git a/tvix/nar-bridge/pkg/importer/importer_test.go b/tvix/nar-bridge-go/pkg/importer/importer_test.go
index 8ff63b9257..313677084f 100644
--- a/tvix/nar-bridge/pkg/importer/importer_test.go
+++ b/tvix/nar-bridge-go/pkg/importer/importer_test.go
@@ -9,7 +9,7 @@ import (
 	"testing"
 
 	castorev1pb "code.tvl.fyi/tvix/castore-go"
-	"code.tvl.fyi/tvix/nar-bridge/pkg/importer"
+	"code.tvl.fyi/tvix/nar-bridge-go/pkg/importer"
 	"github.com/stretchr/testify/require"
 )
 
diff --git a/tvix/nar-bridge/pkg/importer/roundtrip_test.go b/tvix/nar-bridge-go/pkg/importer/roundtrip_test.go
index 6d6fcb9ee2..c50d332d85 100644
--- a/tvix/nar-bridge/pkg/importer/roundtrip_test.go
+++ b/tvix/nar-bridge-go/pkg/importer/roundtrip_test.go
@@ -11,7 +11,7 @@ import (
 	"testing"
 
 	castorev1pb "code.tvl.fyi/tvix/castore-go"
-	"code.tvl.fyi/tvix/nar-bridge/pkg/importer"
+	"code.tvl.fyi/tvix/nar-bridge-go/pkg/importer"
 	storev1pb "code.tvl.fyi/tvix/store-go"
 	"github.com/stretchr/testify/require"
 )
diff --git a/tvix/nar-bridge/pkg/importer/util_test.go b/tvix/nar-bridge-go/pkg/importer/util_test.go
index 06353cf582..06353cf582 100644
--- a/tvix/nar-bridge/pkg/importer/util_test.go
+++ b/tvix/nar-bridge-go/pkg/importer/util_test.go
diff --git a/tvix/nar-bridge/testdata/emptydirectory.nar b/tvix/nar-bridge-go/testdata/emptydirectory.nar
index baba558622..baba558622 100644
--- a/tvix/nar-bridge/testdata/emptydirectory.nar
+++ b/tvix/nar-bridge-go/testdata/emptydirectory.nar
Binary files differ
diff --git a/tvix/nar-bridge/testdata/nar_1094wph9z4nwlgvsd53abfz8i117ykiv5dwnq9nnhz846s7xqd7d.nar b/tvix/nar-bridge-go/testdata/nar_1094wph9z4nwlgvsd53abfz8i117ykiv5dwnq9nnhz846s7xqd7d.nar
index 6cb0b16e5d..6cb0b16e5d 100644
--- a/tvix/nar-bridge/testdata/nar_1094wph9z4nwlgvsd53abfz8i117ykiv5dwnq9nnhz846s7xqd7d.nar
+++ b/tvix/nar-bridge-go/testdata/nar_1094wph9z4nwlgvsd53abfz8i117ykiv5dwnq9nnhz846s7xqd7d.nar
Binary files differ
diff --git a/tvix/nar-bridge/testdata/onebyteexecutable.nar b/tvix/nar-bridge-go/testdata/onebyteexecutable.nar
index 6868219666..6868219666 100644
--- a/tvix/nar-bridge/testdata/onebyteexecutable.nar
+++ b/tvix/nar-bridge-go/testdata/onebyteexecutable.nar
Binary files differ
diff --git a/tvix/nar-bridge/testdata/onebyteregular.nar b/tvix/nar-bridge-go/testdata/onebyteregular.nar
index b8c94932bf..b8c94932bf 100644
--- a/tvix/nar-bridge/testdata/onebyteregular.nar
+++ b/tvix/nar-bridge-go/testdata/onebyteregular.nar
Binary files differ
diff --git a/tvix/nar-bridge/testdata/popdirectories.nar b/tvix/nar-bridge-go/testdata/popdirectories.nar
index 74313aca52..74313aca52 100644
--- a/tvix/nar-bridge/testdata/popdirectories.nar
+++ b/tvix/nar-bridge-go/testdata/popdirectories.nar
Binary files differ
diff --git a/tvix/nar-bridge/testdata/symlink.nar b/tvix/nar-bridge-go/testdata/symlink.nar
index 7990e4ad5b..7990e4ad5b 100644
--- a/tvix/nar-bridge/testdata/symlink.nar
+++ b/tvix/nar-bridge-go/testdata/symlink.nar
Binary files differ
diff --git a/tvix/nix-compat/Cargo.toml b/tvix/nix-compat/Cargo.toml
index 876ac3ecad..91fd19475a 100644
--- a/tvix/nix-compat/Cargo.toml
+++ b/tvix/nix-compat/Cargo.toml
@@ -4,7 +4,7 @@ version = "0.1.0"
 edition = "2021"
 
 [features]
-# async NAR writer
+# async NAR writer. Also needs the `wire` feature.
 async = ["tokio"]
 # code emitting low-level packets used in the daemon protocol.
 wire = ["tokio", "pin-project-lite"]
diff --git a/tvix/nix-compat/build.rs b/tvix/nix-compat/build.rs
new file mode 100644
index 0000000000..c66b970162
--- /dev/null
+++ b/tvix/nix-compat/build.rs
@@ -0,0 +1,5 @@
+fn main() {
+    // Pick up new test case files
+    // https://github.com/la10736/rstest/issues/256
+    println!("cargo:rerun-if-changed=src/derivation/tests/derivation_tests")
+}
diff --git a/tvix/nix-compat/default.nix b/tvix/nix-compat/default.nix
index 9df76e12fc..08b053b77d 100644
--- a/tvix/nix-compat/default.nix
+++ b/tvix/nix-compat/default.nix
@@ -1,7 +1,11 @@
-{ depot, ... }:
+{ depot, lib, ... }:
 
-depot.tvix.crates.workspaceMembers.nix-compat.build.override {
+(depot.tvix.crates.workspaceMembers.nix-compat.build.override {
   runTests = true;
-  # make sure we also enable async here, so run the tests behind that feature flag.
-  features = [ "default" "async" "wire" ];
-}
+}).overrideAttrs (old: rec {
+  meta.ci.targets = lib.filter (x: lib.hasPrefix "with-features" x || x == "no-features") (lib.attrNames passthru);
+  passthru = depot.tvix.utils.mkFeaturePowerset {
+    inherit (old) crateName;
+    features = [ "async" "wire" ];
+  };
+})
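+# (mkFeaturePowerset is expected to produce one crate build per feature
+# subset — here "async", "wire" and "async"+"wire" — which the meta.ci.targets
+# filter above then exposes alongside the no-features build.)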
diff --git a/tvix/nix-compat/src/nar/reader/mod.rs b/tvix/nix-compat/src/nar/reader/mod.rs
index 9e9237ead3..7e9143c8f3 100644
--- a/tvix/nix-compat/src/nar/reader/mod.rs
+++ b/tvix/nix-compat/src/nar/reader/mod.rs
@@ -16,7 +16,7 @@ use std::marker::PhantomData;
 // Required reading for understanding this module.
 use crate::nar::wire;
 
-#[cfg(feature = "async")]
+#[cfg(all(feature = "async", feature = "wire"))]
 pub mod r#async;
 
 mod read;
diff --git a/tvix/nix-compat/src/nar/wire/mod.rs b/tvix/nix-compat/src/nar/wire/mod.rs
index 9e99b530ce..26da04e67c 100644
--- a/tvix/nix-compat/src/nar/wire/mod.rs
+++ b/tvix/nix-compat/src/nar/wire/mod.rs
@@ -97,7 +97,7 @@ const TOK_PAD_PAR: [u8; 24] = *b"\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0)\0\0\0\0\0\0
 #[derive(Debug)]
 pub(crate) enum PadPar {}
 
-#[cfg(feature = "async")]
+#[cfg(all(feature = "async", feature = "wire"))]
 impl crate::wire::reader::Tag for PadPar {
     const PATTERN: &'static [u8] = &TOK_PAD_PAR;
 
diff --git a/tvix/nix-compat/src/wire/bytes/reader/mod.rs b/tvix/nix-compat/src/wire/bytes/reader/mod.rs
index 6bd376c06f..c0227f4e6c 100644
--- a/tvix/nix-compat/src/wire/bytes/reader/mod.rs
+++ b/tvix/nix-compat/src/wire/bytes/reader/mod.rs
@@ -414,6 +414,7 @@ mod tests {
     }
 
     /// Read the trailer immediately if there is no payload.
+    #[cfg(feature = "async")]
     #[tokio::test]
     async fn read_trailer_immediately() {
         use crate::nar::wire::PadPar;
@@ -431,6 +432,7 @@ mod tests {
     }
 
     /// Read the trailer even if we only read the exact payload size.
+    #[cfg(feature = "async")]
     #[tokio::test]
     async fn read_exact_trailer() {
         use crate::nar::wire::PadPar;
diff --git a/tvix/shell.nix b/tvix/shell.nix
index f0d8ab1657..947cda269b 100644
--- a/tvix/shell.nix
+++ b/tvix/shell.nix
@@ -16,6 +16,19 @@
             ./nixpkgs/cbtemulator-uds.patch
           ];
         });
+
+        # macFUSE bump containing fix for https://github.com/osxfuse/osxfuse/issues/974
+        # https://github.com/NixOS/nixpkgs/pull/320197
+        fuse =
+          if super.stdenv.isDarwin then
+            super.fuse.overrideAttrs
+              (old: rec {
+                version = "4.8.0";
+                src = super.fetchurl {
+                  url = "https://github.com/osxfuse/osxfuse/releases/download/macfuse-${version}/macfuse-${version}.dmg";
+                  hash = "sha256-ucTzO2qdN4QkowMVvC3+4pjEVjbwMsB0xFk+bvQxwtQ=";
+                };
+              }) else super.fuse;
       })
     ];
   })
@@ -30,12 +43,15 @@ pkgs.mkShell {
     pkgs.cargo-machete
     pkgs.cargo-expand
     pkgs.clippy
+    pkgs.d2
     pkgs.evans
     pkgs.fuse
     pkgs.go
     pkgs.grpcurl
     pkgs.hyperfine
     pkgs.mdbook
+    pkgs.mdbook-admonish
+    pkgs.mdbook-d2
     pkgs.mdbook-plantuml
     pkgs.nix_2_3 # b/313
     pkgs.pkg-config
diff --git a/tvix/store-go/pathinfo.pb.go b/tvix/store-go/pathinfo.pb.go
index a4915a3c1f..7615bd155c 100644
--- a/tvix/store-go/pathinfo.pb.go
+++ b/tvix/store-go/pathinfo.pb.go
@@ -3,7 +3,7 @@
 
 // Code generated by protoc-gen-go. DO NOT EDIT.
 // versions:
-// 	protoc-gen-go v1.33.0
+// 	protoc-gen-go v1.34.1
 // 	protoc        (unknown)
 // source: tvix/store/protos/pathinfo.proto
 
diff --git a/tvix/store-go/rpc_pathinfo.pb.go b/tvix/store-go/rpc_pathinfo.pb.go
index 883ffb3f01..155d59896b 100644
--- a/tvix/store-go/rpc_pathinfo.pb.go
+++ b/tvix/store-go/rpc_pathinfo.pb.go
@@ -3,7 +3,7 @@
 
 // Code generated by protoc-gen-go. DO NOT EDIT.
 // versions:
-// 	protoc-gen-go v1.33.0
+// 	protoc-gen-go v1.34.1
 // 	protoc        (unknown)
 // source: tvix/store/protos/rpc_pathinfo.proto
 
diff --git a/tvix/store/Cargo.toml b/tvix/store/Cargo.toml
index b362a87ffb..733b7c11f2 100644
--- a/tvix/store/Cargo.toml
+++ b/tvix/store/Cargo.toml
@@ -18,9 +18,6 @@ lazy_static = "1.4.0"
 nix-compat = { path = "../nix-compat", features = ["async"] }
 pin-project-lite = "0.2.13"
 prost = "0.12.1"
-opentelemetry = { version = "0.21.0", optional = true}
-opentelemetry-otlp = { version = "0.14.0", optional = true }
-opentelemetry_sdk = { version = "0.21.0", features = ["rt-tokio"], optional = true}
 serde = { version = "1.0.197", features = [ "derive" ] }
 serde_json = "1.0"
 serde_with = "3.7.0"
@@ -29,20 +26,21 @@ sha2 = "0.10.6"
 sled = { version = "0.34.7" }
 thiserror = "1.0.38"
 tokio = { version = "1.32.0", features = ["fs", "macros", "net", "rt", "rt-multi-thread", "signal"] }
-tokio-listener = { version = "0.3.2", features = [ "tonic011" ] }
+tokio-listener = { version = "0.4.1", features = [ "tonic011" ] }
 tokio-stream = { version = "0.1.14", features = ["fs"] }
 tokio-util = { version = "0.7.9", features = ["io", "io-util", "compat"] }
 tonic = { version = "0.11.0", features = ["tls", "tls-roots"] }
 tower = "0.4.13"
-tracing = "0.1.37"
-tracing-opentelemetry = "0.22.0"
-tracing-subscriber = { version = "0.3.16", features = ["env-filter", "json"] }
+tower-http = { version = "0.4.4", features = ["trace"] }
 tvix-castore = { path = "../castore" }
 url = "2.4.0"
 walkdir = "2.4.0"
 reqwest = { version = "0.11.22", features = ["rustls-tls-native-roots", "stream"], default-features = false }
 lru = "0.12.3"
 parking_lot = "0.12.2"
+tvix-tracing = { path = "../tracing", features = ["tonic"] }
+tracing = "0.1.40"
+tracing-indicatif = "0.3.6"
 
 [dependencies.tonic-reflection]
 optional = true
@@ -72,10 +70,14 @@ cloud = [
   "tvix-castore/cloud"
 ]
 fuse = ["tvix-castore/fuse"]
-otlp = ["dep:opentelemetry", "dep:opentelemetry-otlp", "dep:opentelemetry_sdk"]
+otlp = ["tvix-tracing/otlp"]
 tonic-reflection = ["dep:tonic-reflection", "tvix-castore/tonic-reflection"]
+tracy = ["tvix-tracing/tracy"]
 virtiofs = ["tvix-castore/virtiofs"]
 # Whether to run the integration tests.
 # Requires the following packages in $PATH:
 # cbtemulator, google-cloud-bigtable-tool
 integration = []
+
+[lints]
+workspace = true
diff --git a/tvix/store/default.nix b/tvix/store/default.nix
index ad47994f24..3fe47fe60b 100644
--- a/tvix/store/default.nix
+++ b/tvix/store/default.nix
@@ -1,4 +1,4 @@
-{ depot, pkgs, ... }:
+{ depot, pkgs, lib, ... }:
 
 let
   mkImportCheck = p: expectedPath: {
@@ -22,31 +22,35 @@ let
   };
 in
 
-(depot.tvix.crates.workspaceMembers.tvix-store.build.override {
+(depot.tvix.crates.workspaceMembers.tvix-store.build.override (old: {
   runTests = true;
   testPreRun = ''
     export SSL_CERT_FILE=${pkgs.cacert}/etc/ssl/certs/ca-bundle.crt
   '';
-
-  # enable some optional features.
-  features = [ "default" "cloud" ]
-    # virtiofs feature currently fails to build on Darwin.
-    ++ pkgs.lib.optional pkgs.stdenv.isLinux "virtiofs";
-}).overrideAttrs (_: {
-  meta.ci.targets = [ "integration-tests" ];
-  meta.ci.extraSteps = {
-    import-docs = (mkImportCheck "tvix/store/docs" ./docs);
+  features = old.features
+    # virtiofs feature currently fails to build on Darwin
+    ++ lib.optional pkgs.stdenv.isLinux "virtiofs";
+})).overrideAttrs (old: rec {
+  meta.ci = {
+    targets = [ "integration-tests" ] ++ lib.filter (x: lib.hasPrefix "with-features" x || x == "no-features") (lib.attrNames passthru);
+    extraSteps.import-docs = (mkImportCheck "tvix/docs/src/store" ../docs/src/store);
   };
-  passthru.integration-tests = depot.tvix.crates.workspaceMembers.tvix-store.build.override {
-    runTests = true;
-    testPreRun = ''
+  passthru = (depot.tvix.utils.mkFeaturePowerset {
+    inherit (old) crateName;
+    features = ([ "cloud" "fuse" "otlp" "tonic-reflection" ]
+      # virtiofs feature currently fails to build on Darwin
+      ++ lib.optional pkgs.stdenv.isLinux "virtiofs");
+    override.testPreRun = ''
       export SSL_CERT_FILE=${pkgs.cacert}/etc/ssl/certs/ca-bundle.crt
-      export PATH="$PATH:${pkgs.lib.makeBinPath [pkgs.cbtemulator pkgs.google-cloud-bigtable-tool]}"
     '';
-
-    # enable some optional features.
-    features = [ "default" "cloud" "integration" ]
-      # virtiofs feature currently fails to build on Darwin.
-      ++ pkgs.lib.optional pkgs.stdenv.isLinux "virtiofs";
+  }) // {
+    integration-tests = depot.tvix.crates.workspaceMembers.${old.crateName}.build.override (old: {
+      runTests = true;
+      testPreRun = ''
+        export SSL_CERT_FILE=${pkgs.cacert}/etc/ssl/certs/ca-bundle.crt;
+        export PATH="$PATH:${pkgs.lib.makeBinPath [ pkgs.cbtemulator pkgs.google-cloud-bigtable-tool ]}"
+      '';
+      features = old.features ++ [ "integration" ];
+    });
   };
 })
diff --git a/tvix/store/src/bin/tvix-store.rs b/tvix/store/src/bin/tvix-store.rs
index 906d0ab520..657ce06720 100644
--- a/tvix/store/src/bin/tvix-store.rs
+++ b/tvix/store/src/bin/tvix-store.rs
@@ -13,11 +13,10 @@ use tokio_listener::Listener;
 use tokio_listener::SystemOptions;
 use tokio_listener::UserOptions;
 use tonic::transport::Server;
-use tracing::info;
-use tracing::Level;
-use tracing_subscriber::EnvFilter;
-use tracing_subscriber::Layer;
-use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt};
+use tower::ServiceBuilder;
+use tower_http::trace::{DefaultMakeSpan, TraceLayer};
+use tracing::{info, info_span, instrument, Level, Span};
+use tracing_indicatif::span_ext::IndicatifSpanExt as _;
 use tvix_castore::import::fs::ingest_path;
 use tvix_store::nar::NarCalculationService;
 use tvix_store::proto::NarInfo;
@@ -37,15 +36,6 @@ use tvix_store::pathinfoservice::make_fs;
 #[cfg(feature = "fuse")]
 use tvix_castore::fs::fuse::FuseDaemon;
 
-#[cfg(feature = "otlp")]
-use opentelemetry::KeyValue;
-#[cfg(feature = "otlp")]
-use opentelemetry_sdk::{
-    resource::{ResourceDetector, SdkProvidedResourceDetector},
-    trace::BatchConfig,
-    Resource,
-};
-
 #[cfg(feature = "virtiofs")]
 use tvix_castore::fs::virtiofs::start_virtiofs_daemon;
 
@@ -65,8 +55,8 @@ struct Cli {
     /// It's also possible to set `RUST_LOG` according to
     /// `tracing_subscriber::filter::EnvFilter`, which will always have
     /// priority.
-    #[arg(long)]
-    log_level: Option<Level>,
+    #[arg(long, default_value_t=Level::INFO)]
+    log_level: Level,
 
     #[command(subcommand)]
     command: Commands,
@@ -197,88 +187,15 @@ enum Commands {
     },
 }
 
-#[cfg(all(feature = "fuse", not(target_os = "macos")))]
+#[cfg(feature = "fuse")]
 fn default_threads() -> usize {
     std::thread::available_parallelism()
         .map(|threads| threads.into())
         .unwrap_or(4)
 }
-// On MacFUSE only a single channel will receive ENODEV when the file system is
-// unmounted and so all the other channels will block forever.
-// See https://github.com/osxfuse/osxfuse/issues/974
-#[cfg(all(feature = "fuse", target_os = "macos"))]
-fn default_threads() -> usize {
-    1
-}
-
-#[tokio::main]
-async fn main() -> Result<(), Box<dyn std::error::Error>> {
-    let cli = Cli::parse();
-
-    // configure log settings
-    let level = cli.log_level.unwrap_or(Level::INFO);
-
-    // Set up the tracing subscriber.
-    let subscriber = tracing_subscriber::registry().with(
-        tracing_subscriber::fmt::Layer::new()
-            .with_writer(std::io::stderr)
-            .compact()
-            .with_filter(
-                EnvFilter::builder()
-                    .with_default_directive(level.into())
-                    .from_env()
-                    .expect("invalid RUST_LOG"),
-            ),
-    );
-
-    // Add the otlp layer (when otlp is enabled, and it's not disabled in the CLI)
-    // then init the registry.
-    // If the feature is feature-flagged out, just init without adding the layer.
-    // It's necessary to do this separately, as every with() call chains the
-    // layer into the type of the registry.
-    #[cfg(feature = "otlp")]
-    {
-        let subscriber = if cli.otlp {
-            let tracer = opentelemetry_otlp::new_pipeline()
-                .tracing()
-                .with_exporter(opentelemetry_otlp::new_exporter().tonic())
-                .with_batch_config(BatchConfig::default())
-                .with_trace_config(opentelemetry_sdk::trace::config().with_resource({
-                    // use SdkProvidedResourceDetector.detect to detect resources,
-                    // but replace the default service name with our default.
-                    // https://github.com/open-telemetry/opentelemetry-rust/issues/1298
-                    let resources =
-                        SdkProvidedResourceDetector.detect(std::time::Duration::from_secs(0));
-                    // SdkProvidedResourceDetector currently always sets
-                    // `service.name`, but we don't like its default.
-                    if resources.get("service.name".into()).unwrap() == "unknown_service".into() {
-                        resources.merge(&Resource::new([KeyValue::new(
-                            "service.name",
-                            "tvix.store",
-                        )]))
-                    } else {
-                        resources
-                    }
-                }))
-                .install_batch(opentelemetry_sdk::runtime::Tokio)?;
-
-            // Create a tracing layer with the configured tracer
-            let layer = tracing_opentelemetry::layer().with_tracer(tracer);
-
-            subscriber.with(Some(layer))
-        } else {
-            subscriber.with(None)
-        };
-
-        subscriber.try_init()?;
-    }
-
-    // Init the registry (when otlp is not enabled)
-    #[cfg(not(feature = "otlp"))]
-    {
-        subscriber.try_init()?;
-    }
 
+#[instrument(skip_all)]
+async fn run_cli(cli: Cli) -> Result<(), Box<dyn std::error::Error>> {
     match cli.command {
         Commands::Daemon {
             listen_address,
@@ -300,7 +217,17 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
                 .parse()
                 .unwrap();
 
-            let mut server = Server::builder();
+            let mut server = Server::builder().layer(
+                ServiceBuilder::new()
+                    .layer(
+                        TraceLayer::new_for_grpc().make_span_with(
+                            DefaultMakeSpan::new()
+                                .level(Level::INFO)
+                                .include_headers(true),
+                        ),
+                    )
+                    .map_request(tvix_tracing::propagate::tonic::accept_trace),
+            );
 
             #[allow(unused_mut)]
             let mut router = server
@@ -416,15 +343,26 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
             // Arc the PathInfoService, as we clone it .
             let path_info_service: Arc<dyn PathInfoService> = path_info_service.into();
 
+            let lookups_span = info_span!(
+                "lookup pathinfos",
+                "indicatif.pb_show" = tracing::field::Empty
+            );
+            lookups_span.pb_set_length(reference_graph.closure.len() as u64);
+            lookups_span.pb_set_style(&tvix_tracing::PB_PROGRESS_STYLE);
+            lookups_span.pb_start();
+
             // From our reference graph, lookup all pathinfos that might exist.
             let elems: Vec<_> = futures::stream::iter(reference_graph.closure)
                 .map(|elem| {
                     let path_info_service = path_info_service.clone();
                     async move {
-                        path_info_service
+                        let resp = path_info_service
                             .get(*elem.path.digest())
                             .await
-                            .map(|resp| (elem, resp))
+                            .map(|resp| (elem, resp));
+
+                        Span::current().pb_inc(1);
+                        resp
                     }
                 })
                 .buffer_unordered(50)
@@ -508,7 +446,7 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
                 )
                 .await?;
 
-            let mut fuse_daemon = tokio::task::spawn_blocking(move || {
+            let fuse_daemon = tokio::task::spawn_blocking(move || {
                 let fs = make_fs(
                     blob_service,
                     directory_service,
@@ -522,16 +460,22 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
             })
             .await??;
 
-            // grab a handle to unmount the file system, and register a signal
-            // handler.
-            tokio::spawn(async move {
-                tokio::signal::ctrl_c().await.unwrap();
-                info!("interrupt received, unmounting…");
-                tokio::task::spawn_blocking(move || fuse_daemon.unmount()).await??;
-                info!("unmount occured, terminating…");
-                Ok::<_, std::io::Error>(())
-            })
-            .await??;
+            // Wait for a ctrl_c and then call fuse_daemon.unmount().
+            tokio::spawn({
+                let fuse_daemon = fuse_daemon.clone();
+                async move {
+                    tokio::signal::ctrl_c().await.unwrap();
+                    info!("interrupt received, unmounting…");
+                    tokio::task::spawn_blocking(move || fuse_daemon.unmount()).await??;
+                    info!("unmount occured, terminating…");
+                    Ok::<_, std::io::Error>(())
+                }
+            });
+
+            // Wait for the server to finish, which can happen either through
+            // it being unmounted externally, or through a signal invoking the
+            // handler above.
+            tokio::task::spawn_blocking(move || fuse_daemon.wait()).await?
         }
         #[cfg(feature = "virtiofs")]
         Commands::VirtioFs {
@@ -567,3 +511,36 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
     };
     Ok(())
 }
+
+#[tokio::main]
+async fn main() -> Result<(), Box<dyn std::error::Error>> {
+    let cli = Cli::parse();
+
+    let tracing_handle = {
+        let mut builder = tvix_tracing::TracingBuilder::default();
+        builder = builder.level(cli.log_level).enable_progressbar();
+        #[cfg(feature = "otlp")]
+        {
+            if cli.otlp {
+                builder = builder.enable_otlp("tvix.store");
+            }
+        }
+        builder.build()?
+    };
+
+    tokio::select! {
+        res = tokio::signal::ctrl_c() => {
+            res?;
+            if let Err(e) = tracing_handle.force_shutdown().await {
+                eprintln!("failed to shutdown tracing: {e}");
+            }
+            Ok(())
+        },
+        res = run_cli(cli) => {
+            if let Err(e) = tracing_handle.shutdown().await {
+                eprintln!("failed to shutdown tracing: {e}");
+            }
+            res
+        }
+    }
+}
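
The main() above races run_cli() against ctrl_c so that tracing providers are always flushed before the process exits. A minimal sketch of the same pattern, with a hypothetical flush_tracing() stub standing in for TracingHandle::shutdown()/force_shutdown():

    use std::error::Error;
    use std::time::Duration;

    // Stand-in for the real work; in tvix-store this is run_cli(cli).
    async fn run() -> Result<(), Box<dyn Error>> {
        tokio::time::sleep(Duration::from_secs(60)).await;
        Ok(())
    }

    // Stand-in for TracingHandle::shutdown / force_shutdown.
    async fn flush_tracing() {}

    #[tokio::main]
    async fn main() -> Result<(), Box<dyn Error>> {
        tokio::select! {
            // Either the user interrupts us…
            res = tokio::signal::ctrl_c() => {
                res?;
                flush_tracing().await; // force_shutdown() in the real code
                Ok(())
            },
            // …or the work finishes on its own.
            res = run() => {
                flush_tracing().await; // shutdown() in the real code
                res
            }
        }
    }
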
diff --git a/tvix/store/src/import.rs b/tvix/store/src/import.rs
index 888380bca9..9d7a995581 100644
--- a/tvix/store/src/import.rs
+++ b/tvix/store/src/import.rs
@@ -102,8 +102,9 @@ pub fn derive_nar_ca_path_info(
     }
 }
 
-/// Ingest the given path `path` and register the resulting output path in the
-/// [`PathInfoService`] as a recursive fixed output NAR.
+/// Ingests the contents at the given path `path` into castore, and registers the
+/// resulting root node in the passed PathInfoService, using the "NAR sha256
+/// digest" and the passed name for output path calculation.
 #[instrument(skip_all, fields(store_name=name, path=?path), err)]
 pub async fn import_path_as_nar_ca<BS, DS, PS, NS, P>(
     path: P,
@@ -137,7 +138,7 @@ where
         )
     })?;
 
-    // assemble a new root_node with a name that is derived from the nar hash.
+    // rename the root node to match the calculated output path.
     let root_node = root_node.rename(output_path.to_string().into_bytes().into());
     log_node(&root_node, path.as_ref());
 
@@ -150,7 +151,7 @@ where
 
     // This new [`PathInfo`] that we get back from there might contain additional signatures or
     // information set by the service itself. In this function, we silently swallow it because
-    // callers doesn't really need it.
+    // callers don't really need it.
     let _path_info = path_info_service.as_ref().put(path_info).await?;
 
     Ok(output_path.to_owned())
diff --git a/tvix/store/src/nar/import.rs b/tvix/store/src/nar/import.rs
index 3d7c50014a..32c2f4e580 100644
--- a/tvix/store/src/nar/import.rs
+++ b/tvix/store/src/nar/import.rs
@@ -1,15 +1,57 @@
 use nix_compat::nar::reader::r#async as nar_reader;
-use tokio::{io::AsyncBufRead, sync::mpsc, try_join};
+use sha2::Digest;
+use tokio::{
+    io::{AsyncBufRead, AsyncRead},
+    sync::mpsc,
+    try_join,
+};
 use tvix_castore::{
     blobservice::BlobService,
     directoryservice::DirectoryService,
-    import::{ingest_entries, IngestionEntry, IngestionError},
+    import::{
+        blobs::{self, ConcurrentBlobUploader},
+        ingest_entries, IngestionEntry, IngestionError,
+    },
     proto::{node::Node, NamedNode},
     PathBuf,
 };
 
 /// Ingests the contents from a [AsyncRead] providing NAR into the tvix store,
 /// interacting with a [BlobService] and [DirectoryService].
+/// Returns the castore root node, as well as the sha256 and size of the NAR
+/// contents ingested.
+pub async fn ingest_nar_and_hash<R, BS, DS>(
+    blob_service: BS,
+    directory_service: DS,
+    r: &mut R,
+) -> Result<(Node, [u8; 32], u64), IngestionError<Error>>
+where
+    R: AsyncRead + Unpin + Send,
+    BS: BlobService + Clone + 'static,
+    DS: DirectoryService,
+{
+    let mut nar_hash = sha2::Sha256::new();
+    let mut nar_size = 0;
+
+    // Assemble NarHash and NarSize as we read bytes.
+    let r = tokio_util::io::InspectReader::new(r, |b| {
+        nar_size += b.len() as u64;
+        use std::io::Write;
+        nar_hash.write_all(b).unwrap();
+    });
+
+    // HACK: InspectReader doesn't implement AsyncBufRead.
+    // See if this can be propagated through, so that we can then require our
+    // input reader to be buffered too.
+    let mut r = tokio::io::BufReader::new(r);
+
+    let root_node = ingest_nar(blob_service, directory_service, &mut r).await?;
+
+    Ok((root_node, nar_hash.finalize().into(), nar_size))
+}
+
+/// Ingests the contents from an [AsyncRead] providing NAR into the tvix store,
+/// interacting with a [BlobService] and [DirectoryService].
 /// It returns the castore root node or an error.
 pub async fn ingest_nar<R, BS, DS>(
     blob_service: BS,
@@ -18,7 +60,7 @@ pub async fn ingest_nar<R, BS, DS>(
 ) -> Result<Node, IngestionError<Error>>
 where
     R: AsyncBufRead + Unpin + Send,
-    BS: BlobService + Clone,
+    BS: BlobService + Clone + 'static,
     DS: DirectoryService,
 {
     // open the NAR for reading.
@@ -29,14 +71,22 @@ where
     let rx = tokio_stream::wrappers::ReceiverStream::new(rx);
 
     let produce = async move {
+        let mut blob_uploader = ConcurrentBlobUploader::new(blob_service);
+
         let res = produce_nar_inner(
-            blob_service,
+            &mut blob_uploader,
             root_node,
             "root".parse().unwrap(), // HACK: the root node sent to ingest_entries may not be ROOT.
             tx.clone(),
         )
         .await;
 
+        if let Err(err) = blob_uploader.join().await {
+            tx.send(Err(err.into()))
+                .await
+                .map_err(|e| Error::IO(std::io::Error::new(std::io::ErrorKind::BrokenPipe, e)))?;
+        }
+
         tx.send(res)
             .await
             .map_err(|e| Error::IO(std::io::Error::new(std::io::ErrorKind::BrokenPipe, e)))?;
@@ -54,13 +104,13 @@ where
 }
 
 async fn produce_nar_inner<BS>(
-    blob_service: BS,
+    blob_uploader: &mut ConcurrentBlobUploader<BS>,
     node: nar_reader::Node<'_, '_>,
     path: PathBuf,
     tx: mpsc::Sender<Result<IngestionEntry, Error>>,
 ) -> Result<IngestionEntry, Error>
 where
-    BS: BlobService + Clone,
+    BS: BlobService + Clone + 'static,
 {
     Ok(match node {
         nar_reader::Node::Symlink { target } => IngestionEntry::Symlink { path, target },
@@ -68,12 +118,8 @@ where
             executable,
             mut reader,
         } => {
-            let (digest, size) = {
-                let mut blob_writer = blob_service.open_write().await;
-                let size = tokio::io::copy_buf(&mut reader, &mut blob_writer).await?;
-
-                (blob_writer.close().await?, size)
-            };
+            let size = reader.len();
+            let digest = blob_uploader.upload(&path, size, &mut reader).await?;
 
             IngestionEntry::Regular {
                 path,
@@ -91,7 +137,7 @@ where
                     .expect("Tvix bug: failed to join name");
 
                 let entry = Box::pin(produce_nar_inner(
-                    blob_service.clone(),
+                    blob_uploader,
                     entry.node,
                     path,
                     tx.clone(),
@@ -112,6 +158,9 @@ where
 pub enum Error {
     #[error(transparent)]
     IO(#[from] std::io::Error),
+
+    #[error(transparent)]
+    BlobUpload(#[from] blobs::Error),
 }
 
 #[cfg(test)]
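
ingest_nar_and_hash above computes NarHash and NarSize in a single pass by tapping the byte stream with tokio_util's InspectReader. A self-contained sketch of that technique (the in-memory input is illustrative):

    use sha2::{Digest, Sha256};
    use tokio::io::AsyncReadExt;
    use tokio_util::io::InspectReader;

    #[tokio::main]
    async fn main() -> std::io::Result<()> {
        let data: &[u8] = b"pretend this is a NAR stream";
        let mut nar_hash = Sha256::new();
        let mut nar_size = 0u64;

        // Tap every chunk on its way to the consumer.
        let mut r = InspectReader::new(data, |chunk| {
            nar_size += chunk.len() as u64;
            nar_hash.update(chunk);
        });

        // The consumer (ingest_nar in the real code) just reads as usual.
        let mut sink = Vec::new();
        r.read_to_end(&mut sink).await?;
        drop(r); // release the closure's borrows of nar_hash/nar_size

        println!("size={nar_size} sha256={:x}", nar_hash.finalize());
        Ok(())
    }
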
diff --git a/tvix/store/src/nar/mod.rs b/tvix/store/src/nar/mod.rs
index 164748a655..8cbb091f1a 100644
--- a/tvix/store/src/nar/mod.rs
+++ b/tvix/store/src/nar/mod.rs
@@ -4,6 +4,7 @@ use tvix_castore::B3Digest;
 mod import;
 mod renderer;
 pub use import::ingest_nar;
+pub use import::ingest_nar_and_hash;
 pub use renderer::calculate_size_and_sha256;
 pub use renderer::write_nar;
 pub use renderer::SimpleRenderer;
diff --git a/tvix/store/src/nar/renderer.rs b/tvix/store/src/nar/renderer.rs
index efd67671db..e3cb54dd22 100644
--- a/tvix/store/src/nar/renderer.rs
+++ b/tvix/store/src/nar/renderer.rs
@@ -6,6 +6,8 @@ use nix_compat::nar::writer::r#async as nar_writer;
 use sha2::{Digest, Sha256};
 use tokio::io::{self, AsyncWrite, BufReader};
 use tonic::async_trait;
+use tracing::{instrument, Span};
+use tracing_indicatif::span_ext::IndicatifSpanExt;
 use tvix_castore::{
     blobservice::BlobService,
     directoryservice::DirectoryService,
@@ -48,6 +50,7 @@ where
 
 /// Invoke [write_nar], and return the size and sha256 digest of the produced
 /// NAR output.
+#[instrument(skip_all, fields(indicatif.pb_show=1))]
 pub async fn calculate_size_and_sha256<BS, DS>(
     root_node: &castorepb::node::Node,
     blob_service: BS,
@@ -60,6 +63,10 @@ where
     let mut h = Sha256::new();
     let mut cw = CountWrite::from(&mut h);
 
+    let span = Span::current();
+    span.pb_set_message("Calculating NAR");
+    span.pb_start();
+
     write_nar(
         // The hasher doesn't speak async. It doesn't
         // actually do any I/O, so it's fine to wrap.
diff --git a/tvix/store/src/pathinfoservice/bigtable.rs b/tvix/store/src/pathinfoservice/bigtable.rs
index d608cbdb81..707a686c0a 100644
--- a/tvix/store/src/pathinfoservice/bigtable.rs
+++ b/tvix/store/src/pathinfoservice/bigtable.rs
@@ -6,11 +6,12 @@ use bigtable_rs::{bigtable, google::bigtable::v2 as bigtable_v2};
 use bytes::Bytes;
 use data_encoding::HEXLOWER;
 use futures::stream::BoxStream;
+use nix_compat::nixbase32;
 use prost::Message;
 use serde::{Deserialize, Serialize};
 use serde_with::{serde_as, DurationSeconds};
 use tonic::async_trait;
-use tracing::trace;
+use tracing::{instrument, trace};
 use tvix_castore::Error;
 
 /// There should not be more than 10 MiB in a single cell.
@@ -66,6 +67,22 @@ pub struct BigtableParameters {
     app_profile_id: String,
 }
 
+impl BigtableParameters {
+    #[cfg(test)]
+    pub fn default_for_tests() -> Self {
+        Self {
+            project_id: "project-1".into(),
+            instance_name: "instance-1".into(),
+            is_read_only: false,
+            channel_size: default_channel_size(),
+            timeout: default_timeout(),
+            table_name: "table-1".into(),
+            family_name: "cf1".into(),
+            app_profile_id: default_app_profile_id(),
+        }
+    }
+}
+
 fn default_app_profile_id() -> String {
     "default".to_owned()
 }
@@ -181,6 +198,7 @@ fn derive_pathinfo_key(digest: &[u8; 20]) -> String {
 
 #[async_trait]
 impl PathInfoService for BigtablePathInfoService {
+    #[instrument(level = "trace", skip_all, fields(path_info.digest = nixbase32::encode(&digest)))]
     async fn get(&self, digest: [u8; 20]) -> Result<Option<PathInfo>, Error> {
         let mut client = self.client.clone();
         let path_info_key = derive_pathinfo_key(&digest);
@@ -277,6 +295,7 @@ impl PathInfoService for BigtablePathInfoService {
         Ok(Some(path_info))
     }
 
+    #[instrument(level = "trace", skip_all, fields(path_info.root_node = ?path_info.node))]
     async fn put(&self, path_info: PathInfo) -> Result<PathInfo, Error> {
         let store_path = path_info
             .validate()
diff --git a/tvix/store/src/pathinfoservice/combinators.rs b/tvix/store/src/pathinfoservice/combinators.rs
new file mode 100644
index 0000000000..664144ef49
--- /dev/null
+++ b/tvix/store/src/pathinfoservice/combinators.rs
@@ -0,0 +1,111 @@
+use crate::proto::PathInfo;
+use futures::stream::BoxStream;
+use nix_compat::nixbase32;
+use tonic::async_trait;
+use tracing::{debug, instrument};
+use tvix_castore::Error;
+
+use super::PathInfoService;
+
+/// Asks the near service first; if the PathInfo is not found there, asks far.
+/// If found in far, returns it, and *inserts* it into
+/// near.
+/// There is no negative cache.
+/// Puts and listings are not implemented for now.
+pub struct Cache<PS1, PS2> {
+    near: PS1,
+    far: PS2,
+}
+
+impl<PS1, PS2> Cache<PS1, PS2> {
+    pub fn new(near: PS1, far: PS2) -> Self {
+        Self { near, far }
+    }
+}
+
+#[async_trait]
+impl<PS1, PS2> PathInfoService for Cache<PS1, PS2>
+where
+    PS1: PathInfoService,
+    PS2: PathInfoService,
+{
+    #[instrument(level = "trace", skip_all, fields(path_info.digest = nixbase32::encode(&digest)))]
+    async fn get(&self, digest: [u8; 20]) -> Result<Option<PathInfo>, Error> {
+        match self.near.get(digest).await? {
+            Some(path_info) => {
+                debug!("serving from cache");
+                Ok(Some(path_info))
+            }
+            None => {
+                debug!("not found in near, asking remote…");
+                match self.far.get(digest).await? {
+                    None => Ok(None),
+                    Some(path_info) => {
+                        debug!("found in remote, adding to cache");
+                        self.near.put(path_info.clone()).await?;
+                        Ok(Some(path_info))
+                    }
+                }
+            }
+        }
+    }
+
+    async fn put(&self, _path_info: PathInfo) -> Result<PathInfo, Error> {
+        Err(Error::StorageError("unimplemented".to_string()))
+    }
+
+    fn list(&self) -> BoxStream<'static, Result<PathInfo, Error>> {
+        Box::pin(tokio_stream::once(Err(Error::StorageError(
+            "unimplemented".to_string(),
+        ))))
+    }
+}
+
+#[cfg(test)]
+mod test {
+    use std::num::NonZeroUsize;
+
+    use crate::{
+        pathinfoservice::{LruPathInfoService, MemoryPathInfoService, PathInfoService},
+        tests::fixtures::PATH_INFO_WITH_NARINFO,
+    };
+
+    const PATH_INFO_DIGEST: [u8; 20] = [0; 20];
+
+    /// Helper function setting up an instance of a "far" and "near"
+    /// PathInfoService.
+    async fn create_pathinfoservice() -> super::Cache<LruPathInfoService, MemoryPathInfoService> {
+        // Create an instance of a "far" PathInfoService.
+        let far = MemoryPathInfoService::default();
+
+        // … and an instance of a "near" PathInfoService.
+        let near = LruPathInfoService::with_capacity(NonZeroUsize::new(1).unwrap());
+
+        // create a PathInfoService combining the two and return it.
+        super::Cache::new(near, far)
+    }
+
+    /// Getting from the far backend also inserts it into the near one.
+    #[tokio::test]
+    async fn test_populate_cache() {
+        let svc = create_pathinfoservice().await;
+
+        // query the PathInfo; it should not be there yet.
+        assert!(svc.get(PATH_INFO_DIGEST).await.unwrap().is_none());
+
+        // insert it into the far one.
+        svc.far.put(PATH_INFO_WITH_NARINFO.clone()).await.unwrap();
+
+        // now try getting it again, it should succeed.
+        assert_eq!(
+            Some(PATH_INFO_WITH_NARINFO.clone()),
+            svc.get(PATH_INFO_DIGEST).await.unwrap()
+        );
+
+        // peek near, it should now be there.
+        assert_eq!(
+            Some(PATH_INFO_WITH_NARINFO.clone()),
+            svc.near.get(PATH_INFO_DIGEST).await.unwrap()
+        );
+    }
+}
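
The combinator composes like any other PathInfoService. A hypothetical wiring that puts a small LRU in front of a slower backend (an in-memory stand-in here; in practice e.g. a gRPC- or HTTP-backed service), using the names exported from this crate:

    use std::num::NonZeroUsize;

    use crate::pathinfoservice::{
        CachePathInfoService, LruPathInfoService, MemoryPathInfoService,
    };

    fn build_cached() -> CachePathInfoService<LruPathInfoService, MemoryPathInfoService> {
        // A small near cache…
        let near = LruPathInfoService::with_capacity(NonZeroUsize::new(1024).unwrap());
        // …in front of a far service.
        let far = MemoryPathInfoService::default();
        CachePathInfoService::new(near, far)
    }
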
diff --git a/tvix/store/src/pathinfoservice/from_addr.rs b/tvix/store/src/pathinfoservice/from_addr.rs
index 455909e7f2..9173d25d05 100644
--- a/tvix/store/src/pathinfoservice/from_addr.rs
+++ b/tvix/store/src/pathinfoservice/from_addr.rs
@@ -105,9 +105,12 @@ pub async fn from_addr(
             // - In the case of unix sockets, there must be a path, but may not be a host.
             // - In the case of non-unix sockets, there must be a host, but no path.
             // Constructing the channel is handled by tvix_castore::channel::from_url.
-            let client =
-                PathInfoServiceClient::new(tvix_castore::tonic::channel_from_url(&url).await?);
-            Box::new(GRPCPathInfoService::from_client(client))
+            Box::new(GRPCPathInfoService::from_client(
+                PathInfoServiceClient::with_interceptor(
+                    tvix_castore::tonic::channel_from_url(&url).await?,
+                    tvix_tracing::propagate::tonic::send_trace,
+                ),
+            ))
         }
         #[cfg(feature = "cloud")]
         "bigtable" => {
diff --git a/tvix/store/src/pathinfoservice/grpc.rs b/tvix/store/src/pathinfoservice/grpc.rs
index 37239ccc12..bcee49aac6 100644
--- a/tvix/store/src/pathinfoservice/grpc.rs
+++ b/tvix/store/src/pathinfoservice/grpc.rs
@@ -4,33 +4,40 @@ use crate::{
     proto::{self, ListPathInfoRequest, PathInfo},
 };
 use async_stream::try_stream;
-use data_encoding::BASE64;
 use futures::stream::BoxStream;
-use tonic::{async_trait, transport::Channel, Code};
-use tracing::instrument;
+use nix_compat::nixbase32;
+use tonic::{async_trait, Code};
+use tracing::{instrument, Span};
+use tracing_indicatif::span_ext::IndicatifSpanExt;
 use tvix_castore::{proto as castorepb, Error};
 
 /// Connects to a (remote) tvix-store PathInfoService over gRPC.
 #[derive(Clone)]
-pub struct GRPCPathInfoService {
+pub struct GRPCPathInfoService<T> {
     /// The internal reference to a gRPC client.
     /// Cloning it is cheap, and it internally handles concurrent requests.
-    grpc_client: proto::path_info_service_client::PathInfoServiceClient<Channel>,
+    grpc_client: proto::path_info_service_client::PathInfoServiceClient<T>,
 }
 
-impl GRPCPathInfoService {
+impl<T> GRPCPathInfoService<T> {
     /// construct a [GRPCPathInfoService] from a [proto::path_info_service_client::PathInfoServiceClient].
     /// panics if called outside the context of a tokio runtime.
     pub fn from_client(
-        grpc_client: proto::path_info_service_client::PathInfoServiceClient<Channel>,
+        grpc_client: proto::path_info_service_client::PathInfoServiceClient<T>,
     ) -> Self {
         Self { grpc_client }
     }
 }
 
 #[async_trait]
-impl PathInfoService for GRPCPathInfoService {
-    #[instrument(level = "trace", skip_all, fields(path_info.digest = BASE64.encode(&digest)))]
+impl<T> PathInfoService for GRPCPathInfoService<T>
+where
+    T: tonic::client::GrpcService<tonic::body::BoxBody> + Send + Sync + Clone + 'static,
+    T::ResponseBody: tonic::codegen::Body<Data = tonic::codegen::Bytes> + Send + 'static,
+    <T::ResponseBody as tonic::codegen::Body>::Error: Into<tonic::codegen::StdError> + Send,
+    T::Future: Send,
+{
+    #[instrument(level = "trace", skip_all, fields(path_info.digest = nixbase32::encode(&digest)))]
     async fn get(&self, digest: [u8; 20]) -> Result<Option<PathInfo>, Error> {
         let path_info = self
             .grpc_client
@@ -106,12 +113,22 @@ impl PathInfoService for GRPCPathInfoService {
 }
 
 #[async_trait]
-impl NarCalculationService for GRPCPathInfoService {
-    #[instrument(level = "trace", skip_all, fields(root_node = ?root_node))]
+impl<T> NarCalculationService for GRPCPathInfoService<T>
+where
+    T: tonic::client::GrpcService<tonic::body::BoxBody> + Send + Sync + Clone + 'static,
+    T::ResponseBody: tonic::codegen::Body<Data = tonic::codegen::Bytes> + Send + 'static,
+    <T::ResponseBody as tonic::codegen::Body>::Error: Into<tonic::codegen::StdError> + Send,
+    T::Future: Send,
+{
+    #[instrument(level = "trace", skip_all, fields(root_node = ?root_node, indicatif.pb_show=1))]
     async fn calculate_nar(
         &self,
         root_node: &castorepb::node::Node,
     ) -> Result<(u64, [u8; 32]), Error> {
+        let span = Span::current();
+        span.pb_set_message("Waiting for NAR calculation");
+        span.pb_start();
+
         let path_info = self
             .grpc_client
             .clone()
@@ -135,6 +152,7 @@ impl NarCalculationService for GRPCPathInfoService {
 #[cfg(test)]
 mod tests {
     use crate::pathinfoservice::tests::make_grpc_path_info_service_client;
+    use crate::pathinfoservice::PathInfoService;
     use crate::tests::fixtures;
 
     /// This ensures connecting via gRPC works as expected.
diff --git a/tvix/store/src/pathinfoservice/lru.rs b/tvix/store/src/pathinfoservice/lru.rs
index f3790a9054..da674f497a 100644
--- a/tvix/store/src/pathinfoservice/lru.rs
+++ b/tvix/store/src/pathinfoservice/lru.rs
@@ -1,11 +1,12 @@
-use std::num::NonZeroUsize;
-use std::sync::Arc;
-use tokio::sync::RwLock;
-
 use async_stream::try_stream;
 use futures::stream::BoxStream;
 use lru::LruCache;
+use nix_compat::nixbase32;
+use std::num::NonZeroUsize;
+use std::sync::Arc;
+use tokio::sync::RwLock;
 use tonic::async_trait;
+use tracing::instrument;
 
 use crate::proto::PathInfo;
 use tvix_castore::Error;
@@ -26,10 +27,12 @@ impl LruPathInfoService {
 
 #[async_trait]
 impl PathInfoService for LruPathInfoService {
+    #[instrument(level = "trace", skip_all, fields(path_info.digest = nixbase32::encode(&digest)))]
     async fn get(&self, digest: [u8; 20]) -> Result<Option<PathInfo>, Error> {
         Ok(self.lru.write().await.get(&digest).cloned())
     }
 
+    #[instrument(level = "trace", skip_all, fields(path_info.root_node = ?path_info.node))]
     async fn put(&self, path_info: PathInfo) -> Result<PathInfo, Error> {
         // call validate
         let store_path = path_info
diff --git a/tvix/store/src/pathinfoservice/memory.rs b/tvix/store/src/pathinfoservice/memory.rs
index f1bbf67f81..3de3221df2 100644
--- a/tvix/store/src/pathinfoservice/memory.rs
+++ b/tvix/store/src/pathinfoservice/memory.rs
@@ -2,9 +2,11 @@ use super::PathInfoService;
 use crate::proto::PathInfo;
 use async_stream::try_stream;
 use futures::stream::BoxStream;
+use nix_compat::nixbase32;
 use std::{collections::HashMap, sync::Arc};
 use tokio::sync::RwLock;
 use tonic::async_trait;
+use tracing::instrument;
 use tvix_castore::Error;
 
 #[derive(Default)]
@@ -14,6 +16,7 @@ pub struct MemoryPathInfoService {
 
 #[async_trait]
 impl PathInfoService for MemoryPathInfoService {
+    #[instrument(level = "trace", skip_all, fields(path_info.digest = nixbase32::encode(&digest)))]
     async fn get(&self, digest: [u8; 20]) -> Result<Option<PathInfo>, Error> {
         let db = self.db.read().await;
 
@@ -23,6 +26,7 @@ impl PathInfoService for MemoryPathInfoService {
         }
     }
 
+    #[instrument(level = "trace", skip_all, fields(path_info.root_node = ?path_info.node))]
     async fn put(&self, path_info: PathInfo) -> Result<PathInfo, Error> {
         // Call validate on the received PathInfo message.
         match path_info.validate() {
diff --git a/tvix/store/src/pathinfoservice/mod.rs b/tvix/store/src/pathinfoservice/mod.rs
index 5f5da2bff2..574bcc0b8b 100644
--- a/tvix/store/src/pathinfoservice/mod.rs
+++ b/tvix/store/src/pathinfoservice/mod.rs
@@ -1,3 +1,4 @@
+mod combinators;
 mod from_addr;
 mod grpc;
 mod lru;
@@ -17,6 +18,7 @@ use tvix_castore::Error;
 
 use crate::proto::PathInfo;
 
+pub use self::combinators::Cache as CachePathInfoService;
 pub use self::from_addr::from_addr;
 pub use self::grpc::GRPCPathInfoService;
 pub use self::lru::LruPathInfoService;
diff --git a/tvix/store/src/pathinfoservice/nix_http.rs b/tvix/store/src/pathinfoservice/nix_http.rs
index 08cd1d0ecb..1dd7da4831 100644
--- a/tvix/store/src/pathinfoservice/nix_http.rs
+++ b/tvix/store/src/pathinfoservice/nix_http.rs
@@ -1,4 +1,5 @@
-use data_encoding::BASE64;
+use super::PathInfoService;
+use crate::{nar::ingest_nar_and_hash, proto::PathInfo};
 use futures::{stream::BoxStream, TryStreamExt};
 use nix_compat::{
     narinfo::{self, NarInfo},
@@ -6,20 +7,13 @@ use nix_compat::{
     nixhash::NixHash,
 };
 use reqwest::StatusCode;
-use sha2::Digest;
-use std::io::{self, Write};
-use tokio::io::{AsyncRead, BufReader};
-use tokio_util::io::InspectReader;
+use tokio::io::{self, AsyncRead};
 use tonic::async_trait;
 use tracing::{debug, instrument, warn};
 use tvix_castore::{
     blobservice::BlobService, directoryservice::DirectoryService, proto as castorepb, Error,
 };
 
-use crate::proto::PathInfo;
-
-use super::PathInfoService;
-
 /// NixHTTPPathInfoService acts as a bridge in between the Nix HTTP Binary cache
 /// protocol provided by Nix binary caches such as cache.nixos.org, and the Tvix
 /// Store Model.
@@ -71,7 +65,7 @@ where
     BS: AsRef<dyn BlobService> + Send + Sync + Clone + 'static,
     DS: AsRef<dyn DirectoryService> + Send + Sync + Clone + 'static,
 {
-    #[instrument(skip_all, err, fields(path.digest=BASE64.encode(&digest)))]
+    #[instrument(skip_all, err, fields(path.digest=nixbase32::encode(&digest)))]
     async fn get(&self, digest: [u8; 20]) -> Result<Option<PathInfo>, Error> {
         let narinfo_url = self
             .base_url
@@ -179,7 +173,7 @@ where
         }));
 
         // handle decompression, depending on the compression field.
-        let r: Box<dyn AsyncRead + Send + Unpin> = match narinfo.compression {
+        let mut r: Box<dyn AsyncRead + Send + Unpin> = match narinfo.compression {
             Some("none") => Box::new(r) as Box<dyn AsyncRead + Send + Unpin>,
             Some("bzip2") | None => Box::new(async_compression::tokio::bufread::BzDecoder::new(r))
                 as Box<dyn AsyncRead + Send + Unpin>,
@@ -195,19 +189,8 @@ where
                 )));
             }
         };
-        let mut nar_hash = sha2::Sha256::new();
-        let mut nar_size = 0;
-
-        // Assemble NarHash and NarSize as we read bytes.
-        let r = InspectReader::new(r, |b| {
-            nar_size += b.len() as u64;
-            nar_hash.write_all(b).unwrap();
-        });
-
-        // HACK: InspectReader doesn't implement AsyncBufRead, but neither do our decompressors.
-        let mut r = BufReader::new(r);
 
-        let root_node = crate::nar::ingest_nar(
+        let (root_node, nar_hash, nar_size) = ingest_nar_and_hash(
             self.blob_service.clone(),
             self.directory_service.clone(),
             &mut r,
@@ -227,7 +210,6 @@ where
                 "NarSize mismatch".to_string(),
             ))?;
         }
-        let nar_hash: [u8; 32] = nar_hash.finalize().into();
         if narinfo.nar_hash != nar_hash {
             warn!(
                 narinfo.nar_hash = %NixHash::Sha256(narinfo.nar_hash),
diff --git a/tvix/store/src/pathinfoservice/sled.rs b/tvix/store/src/pathinfoservice/sled.rs
index 876155b115..96ade18169 100644
--- a/tvix/store/src/pathinfoservice/sled.rs
+++ b/tvix/store/src/pathinfoservice/sled.rs
@@ -1,13 +1,12 @@
 use super::PathInfoService;
 use crate::proto::PathInfo;
 use async_stream::try_stream;
-use data_encoding::BASE64;
 use futures::stream::BoxStream;
+use nix_compat::nixbase32;
 use prost::Message;
 use std::path::Path;
 use tonic::async_trait;
-use tracing::instrument;
-use tracing::warn;
+use tracing::{instrument, warn};
 use tvix_castore::Error;
 
 /// SledPathInfoService stores PathInfo in a [sled](https://github.com/spacejam/sled).
@@ -38,7 +37,7 @@ impl SledPathInfoService {
 
 #[async_trait]
 impl PathInfoService for SledPathInfoService {
-    #[instrument(level = "trace", skip_all, fields(path_info.digest = BASE64.encode(&digest)))]
+    #[instrument(level = "trace", skip_all, fields(path_info.digest = nixbase32::encode(&digest)))]
     async fn get(&self, digest: [u8; 20]) -> Result<Option<PathInfo>, Error> {
         let resp = tokio::task::spawn_blocking({
             let db = self.db.clone();
diff --git a/tvix/store/src/pathinfoservice/tests/mod.rs b/tvix/store/src/pathinfoservice/tests/mod.rs
index 26166d1b75..061655e4ba 100644
--- a/tvix/store/src/pathinfoservice/tests/mod.rs
+++ b/tvix/store/src/pathinfoservice/tests/mod.rs
@@ -4,62 +4,30 @@
 
 use rstest::*;
 use rstest_reuse::{self, *};
-use std::sync::Arc;
-use tvix_castore::proto as castorepb;
-use tvix_castore::{blobservice::BlobService, directoryservice::DirectoryService};
 
 use super::PathInfoService;
+use crate::pathinfoservice::MemoryPathInfoService;
+use crate::pathinfoservice::SledPathInfoService;
 use crate::proto::PathInfo;
 use crate::tests::fixtures::DUMMY_PATH_DIGEST;
+use tvix_castore::proto as castorepb;
 
 mod utils;
 pub use self::utils::make_grpc_path_info_service_client;
 
-/// Convenience type alias batching all three servives together.
-#[allow(clippy::upper_case_acronyms)]
-type BSDSPS = (
-    Arc<dyn BlobService>,
-    Arc<dyn DirectoryService>,
-    Box<dyn PathInfoService>,
-);
-
-/// Creates a PathInfoService using a new Memory{Blob,Directory}Service.
-/// We return a 3-tuple containing all of them, as some tests want to interact
-/// with all three.
-pub async fn make_path_info_service(uri: &str) -> BSDSPS {
-    let blob_service: Arc<dyn BlobService> = tvix_castore::blobservice::from_addr("memory://")
-        .await
-        .unwrap()
-        .into();
-    let directory_service: Arc<dyn DirectoryService> =
-        tvix_castore::directoryservice::from_addr("memory://")
-            .await
-            .unwrap()
-            .into();
-
-    (
-        blob_service.clone(),
-        directory_service.clone(),
-        crate::pathinfoservice::from_addr(uri, blob_service, directory_service)
-            .await
-            .unwrap(),
-    )
-}
+#[cfg(all(feature = "cloud", feature = "integration"))]
+use self::utils::make_bigtable_path_info_service;
 
 #[template]
 #[rstest]
-#[case::memory(make_path_info_service("memory://").await)]
-#[case::grpc(make_grpc_path_info_service_client().await)]
-#[case::sled(make_path_info_service("sled://").await)]
-#[cfg_attr(all(feature = "cloud",feature="integration"), case::bigtable(make_path_info_service("bigtable://instance-1?project_id=project-1&table_name=table-1&family_name=cf1").await))]
-pub fn path_info_services(
-    #[case] services: (
-        impl BlobService,
-        impl DirectoryService,
-        impl PathInfoService,
-    ),
-) {
-}
+#[case::memory(MemoryPathInfoService::default())]
+#[case::grpc({
+    let (_, _, svc) = make_grpc_path_info_service_client().await;
+    svc
+})]
+#[case::sled(SledPathInfoService::new_temporary().unwrap())]
+#[cfg_attr(all(feature = "cloud",feature="integration"), case::bigtable(make_bigtable_path_info_service().await))]
+pub fn path_info_services(#[case] svc: impl PathInfoService) {}
 
 // FUTUREWORK: add more tests rejecting invalid PathInfo messages.
 // A subset of them should also ensure references to other PathInfos, or
@@ -68,9 +36,8 @@ pub fn path_info_services(
 /// Trying to get a non-existent PathInfo should return Ok(None).
 #[apply(path_info_services)]
 #[tokio::test]
-async fn not_found(services: BSDSPS) {
-    let (_, _, path_info_service) = services;
-    assert!(path_info_service
+async fn not_found(svc: impl PathInfoService) {
+    assert!(svc
         .get(DUMMY_PATH_DIGEST)
         .await
         .expect("must succeed")
@@ -80,9 +47,7 @@ async fn not_found(services: BSDSPS) {
 /// Put a PathInfo into the store, get it back.
 #[apply(path_info_services)]
 #[tokio::test]
-async fn put_get(services: BSDSPS) {
-    let (_, _, path_info_service) = services;
-
+async fn put_get(svc: impl PathInfoService) {
     let path_info = PathInfo {
         node: Some(castorepb::Node {
             node: Some(castorepb::node::Node::Symlink(castorepb::SymlinkNode {
@@ -94,20 +59,14 @@ async fn put_get(services: BSDSPS) {
     };
 
     // insert
-    let resp = path_info_service
-        .put(path_info.clone())
-        .await
-        .expect("must succeed");
+    let resp = svc.put(path_info.clone()).await.expect("must succeed");
 
     // expect the returned PathInfo to be equal (for now)
     // in the future, some stores might add additional fields/signatures.
     assert_eq!(path_info, resp);
 
     // get it back
-    let resp = path_info_service
-        .get(DUMMY_PATH_DIGEST)
-        .await
-        .expect("must succeed");
+    let resp = svc.get(DUMMY_PATH_DIGEST).await.expect("must succeed");
 
     assert_eq!(Some(path_info), resp);
 }
diff --git a/tvix/store/src/pathinfoservice/tests/utils.rs b/tvix/store/src/pathinfoservice/tests/utils.rs
index 30c5902b61..3e4fe5c05a 100644
--- a/tvix/store/src/pathinfoservice/tests/utils.rs
+++ b/tvix/store/src/pathinfoservice/tests/utils.rs
@@ -1,6 +1,7 @@
 use std::sync::Arc;
 
 use tonic::transport::{Endpoint, Server, Uri};
+use tvix_castore::{blobservice::BlobService, directoryservice::DirectoryService};
 
 use crate::{
     nar::{NarCalculationService, SimpleRenderer},
@@ -15,7 +16,11 @@ use crate::{
 /// Constructs and returns a gRPC PathInfoService.
 /// We also return memory-based {Blob,Directory}Service,
 /// as the consumer of this function accepts a 3-tuple.
-pub async fn make_grpc_path_info_service_client() -> super::BSDSPS {
+pub async fn make_grpc_path_info_service_client() -> (
+    impl BlobService,
+    impl DirectoryService,
+    GRPCPathInfoService<tonic::transport::Channel>,
+) {
     let (left, right) = tokio::io::duplex(64);
 
     let blob_service = blob_service();
@@ -47,18 +52,27 @@ pub async fn make_grpc_path_info_service_client() -> super::BSDSPS {
     // Create a client, connecting to the right side. The URI is unused.
     let mut maybe_right = Some(right);
 
-    let path_info_service = Box::new(GRPCPathInfoService::from_client(
-        PathInfoServiceClient::new(
-            Endpoint::try_from("http://[::]:50051")
-                .unwrap()
-                .connect_with_connector(tower::service_fn(move |_: Uri| {
-                    let right = maybe_right.take().unwrap();
-                    async move { Ok::<_, std::io::Error>(right) }
-                }))
-                .await
-                .unwrap(),
-        ),
+    let path_info_service = GRPCPathInfoService::from_client(PathInfoServiceClient::new(
+        Endpoint::try_from("http://[::]:50051")
+            .unwrap()
+            .connect_with_connector(tower::service_fn(move |_: Uri| {
+                let right = maybe_right.take().unwrap();
+                async move { Ok::<_, std::io::Error>(right) }
+            }))
+            .await
+            .unwrap(),
     ));
 
     (blob_service, directory_service, path_info_service)
 }
+
+#[cfg(all(feature = "cloud", feature = "integration"))]
+pub(crate) async fn make_bigtable_path_info_service(
+) -> crate::pathinfoservice::BigtablePathInfoService {
+    use crate::pathinfoservice::bigtable::BigtableParameters;
+    use crate::pathinfoservice::BigtablePathInfoService;
+
+    BigtablePathInfoService::connect(BigtableParameters::default_for_tests())
+        .await
+        .unwrap()
+}
diff --git a/tvix/store/src/utils.rs b/tvix/store/src/utils.rs
index e6e42f6ec4..bd3c65a779 100644
--- a/tvix/store/src/utils.rs
+++ b/tvix/store/src/utils.rs
@@ -9,6 +9,7 @@ use tvix_castore::{
     blobservice::{self, BlobService},
     directoryservice::{self, DirectoryService},
 };
+use url::Url;
 
 use crate::nar::{NarCalculationService, SimpleRenderer};
 use crate::pathinfoservice::{self, PathInfoService};
@@ -31,6 +32,7 @@ pub async fn construct_services(
         directoryservice::from_addr(directory_service_addr.as_ref())
             .await?
             .into();
+
     let path_info_service = pathinfoservice::from_addr(
         path_info_service_addr.as_ref(),
         blob_service.clone(),
@@ -38,11 +40,32 @@ pub async fn construct_services(
     )
     .await?;
 
-    // TODO: grpc client also implements NarCalculationService
-    let nar_calculation_service = Box::new(SimpleRenderer::new(
-        blob_service.clone(),
-        directory_service.clone(),
-    )) as Box<dyn NarCalculationService>;
+    // HACK: The grpc client also implements NarCalculationService, and we
+    // really want to use it (otherwise we'd need to fetch everything again for hashing).
+    // Until we revamp store composition and config, detect this special case here.
+    let nar_calculation_service: Box<dyn NarCalculationService> = {
+        use crate::pathinfoservice::GRPCPathInfoService;
+        use crate::proto::path_info_service_client::PathInfoServiceClient;
+
+        let url = Url::parse(path_info_service_addr.as_ref())
+            .map_err(|e| io::Error::other(e.to_string()))?;
+
+        if url.scheme().starts_with("grpc+") {
+            Box::new(GRPCPathInfoService::from_client(
+                PathInfoServiceClient::with_interceptor(
+                    tvix_castore::tonic::channel_from_url(&url)
+                        .await
+                        .map_err(|e| io::Error::other(e.to_string()))?,
+                    tvix_tracing::propagate::tonic::send_trace,
+                ),
+            ))
+        } else {
+            Box::new(SimpleRenderer::new(
+                blob_service.clone(),
+                directory_service.clone(),
+            )) as Box<dyn NarCalculationService>
+        }
+    };
 
     Ok((
         blob_service,
diff --git a/tvix/tracing/Cargo.toml b/tvix/tracing/Cargo.toml
new file mode 100644
index 0000000000..bc9a8c3c77
--- /dev/null
+++ b/tvix/tracing/Cargo.toml
@@ -0,0 +1,43 @@
+[package]
+name = "tvix-tracing"
+version = "0.1.0"
+edition = "2021"
+
+[dependencies]
+lazy_static = "1.4.0"
+tracing = { version = "0.1.40", features = ["max_level_trace", "release_max_level_debug"] }
+tracing-subscriber = { version = "0.3.18", features = ["env-filter"] }
+indicatif = "0.17.8"
+tracing-indicatif = "0.3.6"
+tokio = { version = "1.32.0" , features = ["sync", "rt"] }
+thiserror = "1.0.38"
+
+tracing-opentelemetry = { version = "0.23.0", optional = true }
+opentelemetry = { version = "0.22.0", optional = true }
+opentelemetry-otlp = { version = "0.15.0", optional = true }
+opentelemetry_sdk = { version = "0.22.1", features = ["rt-tokio"], optional = true }
+tracing-tracy = { version = "0.11.0", features = ["flush-on-exit"], optional = true }
+opentelemetry-http = { version = "0.11.0", optional = true }
+
+tonic = { version = "0.11.0", optional = true }
+http  = { version = "0.2.11", optional = true }
+
+[features]
+default = []
+otlp = [
+  "dep:tracing-opentelemetry",
+  "dep:opentelemetry",
+  "dep:opentelemetry-otlp",
+  "dep:opentelemetry_sdk",
+  "dep:opentelemetry-http"
+]
+tracy = [
+  "dep:tracing-tracy"
+]
+tonic = [
+  "dep:tonic",
+  "dep:http",
+]
+
+[lints]
+workspace = true
diff --git a/tvix/tracing/default.nix b/tvix/tracing/default.nix
new file mode 100644
index 0000000000..dd7dc200f2
--- /dev/null
+++ b/tvix/tracing/default.nix
@@ -0,0 +1,11 @@
+{ depot, lib, ... }:
+
+(depot.tvix.crates.workspaceMembers.tvix-tracing.build.override {
+  runTests = true;
+}).overrideAttrs (old: rec {
+  meta.ci.targets = lib.filter (x: lib.hasPrefix "with-features" x || x == "no-features") (lib.attrNames passthru);
+  passthru = depot.tvix.utils.mkFeaturePowerset {
+    inherit (old) crateName;
+    features = [ "otlp" "tracy" ];
+  };
+})
diff --git a/tvix/tracing/src/lib.rs b/tvix/tracing/src/lib.rs
new file mode 100644
index 0000000000..b965ca4a3d
--- /dev/null
+++ b/tvix/tracing/src/lib.rs
@@ -0,0 +1,302 @@
+use indicatif::ProgressStyle;
+use lazy_static::lazy_static;
+use tokio::sync::{mpsc, oneshot};
+use tracing::Level;
+use tracing_indicatif::{filter::IndicatifFilter, IndicatifLayer};
+use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt, EnvFilter, Layer};
+
+#[cfg(feature = "otlp")]
+use opentelemetry::{trace::Tracer, KeyValue};
+#[cfg(feature = "otlp")]
+use opentelemetry_sdk::{
+    propagation::TraceContextPropagator,
+    resource::{ResourceDetector, SdkProvidedResourceDetector},
+    trace::BatchConfigBuilder,
+    Resource,
+};
+#[cfg(feature = "tracy")]
+use tracing_tracy::TracyLayer;
+
+#[cfg(feature = "tonic")] // TODO or http
+pub mod propagate;
+
+lazy_static! {
+    pub static ref PB_PROGRESS_STYLE: ProgressStyle = ProgressStyle::with_template(
+        "{span_child_prefix} {wide_msg} {bar:10} ({elapsed}) {pos:>7}/{len:7}"
+    )
+    .expect("invalid progress template");
+    pub static ref PB_TRANSFER_STYLE: ProgressStyle = ProgressStyle::with_template(
+        "{span_child_prefix} {wide_msg} {binary_bytes:>7}/{binary_total_bytes:7}@{decimal_bytes_per_sec} ({elapsed}) {bar:10} "
+    )
+    .expect("invalid progress template");
+    pub static ref PB_SPINNER_STYLE: ProgressStyle = ProgressStyle::with_template(
+        "{span_child_prefix}{spinner} {wide_msg} ({elapsed}) {pos:>7}/{len:7}"
+    )
+    .expect("invalid progress template");
+}
+
+#[derive(thiserror::Error, Debug)]
+pub enum Error {
+    #[error(transparent)]
+    Init(#[from] tracing_subscriber::util::TryInitError),
+
+    #[error(transparent)]
+    MpscSend(#[from] mpsc::error::SendError<Option<oneshot::Sender<()>>>),
+
+    #[error(transparent)]
+    OneshotRecv(#[from] oneshot::error::RecvError),
+}
+
+#[derive(Clone)]
+pub struct TracingHandle {
+    tx: Option<mpsc::Sender<Option<oneshot::Sender<()>>>>,
+}
+
+impl TracingHandle {
+    /// This will flush any attached tracing providers, e.g. the otlp exporter, if enabled.
+    /// If none are enabled this will be a no-op.
+    ///
+    /// It will not wait until the flush is complete, but you can pass in a oneshot::Sender which
+    /// will receive a message once the flush is completed.
+    pub async fn flush(&self, msg: Option<oneshot::Sender<()>>) -> Result<(), Error> {
+        if let Some(tx) = &self.tx {
+            Ok(tx.send(msg).await?)
+        } else {
+            // If we have a message passed in we need to notify the receiver
+            if let Some(tx) = msg {
+                let _ = tx.send(());
+            }
+            Ok(())
+        }
+    }
+
+    /// This will flush all attached tracing providers and will wait until the flush is completed.
+    /// If no tracing providers like otlp are attached then this will be a no-op.
+    ///
+    /// This should only be called on a regular shutdown.
+    /// If you need to shut down tracing on ctrl_c, use [force_shutdown](#method.force_shutdown)
+    /// otherwise you will get otlp errors.
+    pub async fn shutdown(&self) -> Result<(), Error> {
+        let (tx, rx) = tokio::sync::oneshot::channel();
+        self.flush(Some(tx)).await?;
+        rx.await?;
+        Ok(())
+    }
+
+    /// This will flush all attached tracing providers and will wait until the flush is completed.
+    /// After this it will do some other necessary cleanup.
+    /// If no tracing providers like otlp are attached then this will be a no-op.
+    ///
+    /// This should only be used if the tool received a ctrl_c, otherwise you will get otlp errors.
+    /// If you need to shut down tracing on a regular exit, you should use the [shutdown](#method.shutdown)
+    /// method.
+    pub async fn force_shutdown(&self) -> Result<(), Error> {
+        let (tx, rx) = tokio::sync::oneshot::channel();
+        self.flush(Some(tx)).await?;
+        rx.await?;
+
+        #[cfg(feature = "otlp")]
+        {
+            // Because of a bug within otlp we currently have to use spawn_blocking otherwise
+            // calling `shutdown_tracer_provider` can block forever. See
+            // https://github.com/open-telemetry/opentelemetry-rust/issues/1395#issuecomment-1953280335
+            //
+            // This still throws an error if the tool exits regularly: "OpenTelemetry trace error
+            // occurred. oneshot canceled", but not having this leads to errors if we cancel with
+            // ctrl_c.
+            // So right now this should only be used on ctrl_c; for a regular exit use the
+            // [shutdown](#shutdown) method.
+            let _ = tokio::task::spawn_blocking(move || {
+                opentelemetry::global::shutdown_tracer_provider();
+            })
+            .await;
+        }
+
+        Ok(())
+    }
+}
+
+pub struct TracingBuilder {
+    level: Level,
+    progress_bar: bool,
+
+    #[cfg(feature = "otlp")]
+    service_name: Option<&'static str>,
+}
+
+impl Default for TracingBuilder {
+    fn default() -> Self {
+        TracingBuilder {
+            level: Level::INFO,
+            progress_bar: false,
+
+            #[cfg(feature = "otlp")]
+            service_name: None,
+        }
+    }
+}
+
+impl TracingBuilder {
+    /// Set the log level for all layers: stderr and otlp if configured. RUST_LOG still has a
+    /// higher priority over this value.
+    pub fn level(mut self, level: Level) -> TracingBuilder {
+        self.level = level;
+        self
+    }
+
+    #[cfg(feature = "otlp")]
+    /// Enable otlp by setting a custom service_name
+    pub fn enable_otlp(mut self, service_name: &'static str) -> TracingBuilder {
+        self.service_name = Some(service_name);
+        self
+    }
+
+    /// Enable the progress bar layer; default is disabled
+    pub fn enable_progressbar(mut self) -> TracingBuilder {
+        self.progress_bar = true;
+        self
+    }
+
+    /// This will set up tracing based on the configuration passed in.
+    /// It will set up a stderr writer output layer and an EnvFilter based on the provided log
+    /// level (RUST_LOG still has a higher priority over the configured value).
+    /// The EnvFilter will be applied to all configured layers, including otlp.
+    ///
+    /// It will also configure otlp if the feature is enabled and a service_name was provided. It
+    /// will then set up a channel which is later used for flushing the provider.
+    pub fn build(self) -> Result<TracingHandle, Error> {
+        // Set up the tracing subscriber.
+        let indicatif_layer = IndicatifLayer::new().with_progress_style(PB_SPINNER_STYLE.clone());
+        let subscriber = tracing_subscriber::registry()
+            .with(
+                EnvFilter::builder()
+                    .with_default_directive(self.level.into())
+                    .from_env()
+                    .expect("invalid RUST_LOG"),
+            )
+            .with(
+                tracing_subscriber::fmt::Layer::new()
+                    .with_writer(indicatif_layer.get_stderr_writer())
+                    .compact(),
+            )
+            .with((self.progress_bar).then(|| {
+                indicatif_layer.with_filter(
+                    // only show progress for spans with indicatif.pb_show field being set
+                    IndicatifFilter::new(false),
+                )
+            }));
+
+        // Set up otlp if a service_name is configured
+        #[cfg(feature = "otlp")]
+        {
+            if let Some(service_name) = self.service_name {
+                // register a text map propagator for trace propagation
+                opentelemetry::global::set_text_map_propagator(TraceContextPropagator::new());
+
+                let (tracer, tx) = gen_otlp_tracer(service_name.to_string());
+                // Create a tracing layer with the configured tracer
+                let layer = tracing_opentelemetry::layer().with_tracer(tracer);
+
+                #[cfg(feature = "tracy")]
+                {
+                    subscriber
+                        .with(TracyLayer::default())
+                        .with(Some(layer))
+                        .try_init()?;
+                }
+
+                #[cfg(not(feature = "tracy"))]
+                {
+                    subscriber.with(Some(layer)).try_init()?;
+                }
+                return Ok(TracingHandle { tx: Some(tx) });
+            }
+        }
+        #[cfg(feature = "tracy")]
+        {
+            subscriber.with(TracyLayer::default()).try_init()?;
+        }
+        #[cfg(not(feature = "tracy"))]
+        {
+            subscriber.try_init()?;
+        }
+
+        Ok(TracingHandle { tx: None })
+    }
+}
+
+/// Returns an OTLP tracer, and the TX part of a channel, which can be used
+/// to request flushes (and signal back the completion of the flush).
+#[cfg(feature = "otlp")]
+fn gen_otlp_tracer(
+    service_name: String,
+) -> (
+    impl Tracer + tracing_opentelemetry::PreSampledTracer,
+    mpsc::Sender<Option<oneshot::Sender<()>>>,
+) {
+    let tracer = opentelemetry_otlp::new_pipeline()
+        .tracing()
+        .with_exporter(opentelemetry_otlp::new_exporter().tonic())
+        .with_batch_config(
+            BatchConfigBuilder::default()
+                // the default value for `max_export_batch_size` is 512, which we will fill
+                // pretty quickly, which will then result in an export. We want to make sure that
+                // the export is only done once the schedule is met and not as soon as 512 spans
+                // are collected.
+                .with_max_export_batch_size(4096)
+                // analog to default config `max_export_batch_size * 4`
+                .with_max_queue_size(4096 * 4)
+                // only force an export to the otlp collector every 10 seconds to reduce the amount
+                // of error messages if an otlp collector is not available
+                .with_scheduled_delay(std::time::Duration::from_secs(10))
+                .build(),
+        )
+        .with_trace_config(opentelemetry_sdk::trace::config().with_resource({
+            // use SdkProvidedResourceDetector.detect to detect resources,
+            // but replace the default service name with our default.
+            // https://github.com/open-telemetry/opentelemetry-rust/issues/1298
+            let resources = SdkProvidedResourceDetector.detect(std::time::Duration::from_secs(0));
+            // SdkProvidedResourceDetector currently always sets
+            // `service.name`, but we don't like its default.
+            if resources.get("service.name".into()).unwrap() == "unknown_service".into() {
+                resources.merge(&Resource::new([KeyValue::new(
+                    "service.name",
+                    service_name,
+                )]))
+            } else {
+                resources
+            }
+        }))
+        .install_batch(opentelemetry_sdk::runtime::Tokio)
+        .expect("Failed to install batch exporter using Tokio");
+
+    // The tracer provider is needed for later use, like flushing.
+    // It needs to be kept around to handle each message arriving on rx.
+    let tracer_provider = tracer
+        .provider()
+        .expect("Failed to get the tracer provider");
+
+    // Set up a channel for flushing trace providers later
+    let (tx, mut rx) = mpsc::channel::<Option<oneshot::Sender<()>>>(16);
+
+    // Spawn a task that listens on rx for any message. Once we receive a
+    // message we call force_flush on the tracer_provider.
+    tokio::spawn(async move {
+        while let Some(m) = rx.recv().await {
+            // Because of a bug within otlp we currently have to use spawn_blocking,
+            // otherwise calling `force_flush` will block forever, especially if the
+            // tool was closed with ctrl_c. See
+            // https://github.com/open-telemetry/opentelemetry-rust/issues/1395#issuecomment-1953280335
+            let _ = tokio::task::spawn_blocking({
+                let tracer_provider = tracer_provider.clone();
+                move || tracer_provider.force_flush()
+            })
+            .await;
+            if let Some(tx) = m {
+                let _ = tx.send(());
+            }
+        }
+    });
+
+    (tracer, tx)
+}
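
Tying builder and handle together, a minimal consumer of tvix_tracing (mirroring the tvix-store main() earlier in this change) looks roughly like this:

    #[tokio::main]
    async fn main() -> Result<(), Box<dyn std::error::Error>> {
        let handle = tvix_tracing::TracingBuilder::default()
            .level(tracing::Level::DEBUG)
            .enable_progressbar()
            .build()?;

        tracing::info!("doing work…");

        // On a regular exit, flush attached providers and wait for completion;
        // on ctrl_c, force_shutdown() would be used instead.
        handle.shutdown().await?;
        Ok(())
    }
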
diff --git a/tvix/tracing/src/propagate/mod.rs b/tvix/tracing/src/propagate/mod.rs
new file mode 100644
index 0000000000..42c532e9d8
--- /dev/null
+++ b/tvix/tracing/src/propagate/mod.rs
@@ -0,0 +1,9 @@
+#[cfg(feature = "tonic")]
+pub mod tonic;
+
+// TODO: Helper library for reqwest. We could use
+// https://github.com/TrueLayer/reqwest-middleware/tree/main/reqwest-tracing to realise this
+
+// TODO: Helper library for axum or another http server, see
+// https://github.com/hseeberger/hello-tracing-rs/blob/main/hello-tracing-common/src/otel/http.rs
+// as an example and we can reuse tonic::accept_trace fun, at least for a tower::ServiceBuilder
diff --git a/tvix/tracing/src/propagate/tonic.rs b/tvix/tracing/src/propagate/tonic.rs
new file mode 100644
index 0000000000..af643eb70e
--- /dev/null
+++ b/tvix/tracing/src/propagate/tonic.rs
@@ -0,0 +1,59 @@
+use tonic::{
+    metadata::{MetadataKey, MetadataMap, MetadataValue},
+    Status,
+};
+use tracing::{warn, Span};
+
+#[cfg(feature = "otlp")]
+use opentelemetry::{global, propagation::Injector};
+#[cfg(feature = "otlp")]
+use opentelemetry_http::HeaderExtractor;
+#[cfg(feature = "otlp")]
+use tracing_opentelemetry::OpenTelemetrySpanExt;
+
+/// Trace context propagation: associate the current span with the otlp trace of the given request,
+/// if one is present and valid. This only sets the parent trace if the otlp feature is also enabled.
+pub fn accept_trace<B>(request: http::Request<B>) -> http::Request<B> {
+    // we only extract and set a parent trace if the otlp feature is enabled, otherwise this
+    // function is a no-op and we return the request as-is
+    #[cfg(feature = "otlp")]
+    {
+        // Falls back to the current context if no or invalid data is received.
+        let parent_context = global::get_text_map_propagator(|propagator| {
+            propagator.extract(&HeaderExtractor(request.headers()))
+        });
+        Span::current().set_parent(parent_context);
+    }
+    request
+}
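+
+// Hedged usage sketch (illustrative only): wired into a tonic server via tower,
+// so every incoming request is inspected for a trace parent:
+//
+//     Server::builder()
+//         .layer(tower::ServiceBuilder::new().map_request(accept_trace))
+//         .add_service(my_service)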
+
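+/// Writes OpenTelemetry propagation data into tonic request metadata, skipping
+/// (and warning about) keys or values that are not valid gRPC metadata.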
+#[cfg(feature = "otlp")]
+struct MetadataInjector<'a>(&'a mut MetadataMap);
+
+#[cfg(feature = "otlp")]
+impl Injector for MetadataInjector<'_> {
+    fn set(&mut self, key: &str, value: String) {
+        match MetadataKey::from_bytes(key.as_bytes()) {
+            Ok(key) => match MetadataValue::try_from(&value) {
+                Ok(value) => {
+                    self.0.insert(key, value);
+                }
+                Err(error) => warn!(value, error = format!("{error:#}"), "parse metadata value"),
+            },
+            Err(error) => warn!(key, error = format!("{error:#}"), "parse metadata key"),
+        }
+    }
+}
+
+/// Trace context propagation: send the trace context by injecting it into the metadata of the given
+/// request. This only injects the current span if the otlp feature is also enabled.
+pub fn send_trace<T>(mut request: tonic::Request<T>) -> Result<tonic::Request<T>, Status> {
+    #[cfg(feature = "otlp")]
+    {
+        global::get_text_map_propagator(|propagator| {
+            let context = Span::current().context();
+            propagator.inject_context(&context, &mut MetadataInjector(request.metadata_mut()))
+        });
+    }
+    Ok(request)
+}
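+
+// Hedged usage sketch (illustrative only): since `send_trace` has the shape of
+// a tonic interceptor, a client can attach it so outgoing requests carry the
+// current span's context:
+//
+//     MyServiceClient::with_interceptor(channel, send_trace)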
diff --git a/tvix/utils.nix b/tvix/utils.nix
new file mode 100644
index 0000000000..5edc1dc2e8
--- /dev/null
+++ b/tvix/utils.nix
@@ -0,0 +1,42 @@
+{ lib, depot, ... }:
+
+{
+  mkFeaturePowerset = { crateName, features, override ? { } }:
+    let
+      powerset = xs:
+        let
+          addElement = set: element:
+            set ++ map (e: [ element ] ++ e) set;
+        in
+        lib.foldl' addElement [ [ ] ] xs;
+    in
+    lib.listToAttrs (map
+      (featuresPowerset: {
+        name = if featuresPowerset != [ ] then "with-features-${lib.concatStringsSep "-" featuresPowerset}" else "no-features";
+        value = depot.tvix.crates.workspaceMembers.${crateName}.build.override (old: {
+          runTests = true;
+          features = featuresPowerset;
+        } // (if lib.isFunction override then override old else override)
+        );
+      })
+      (powerset features));
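+
+  # Hedged usage sketch (names illustrative):
+  #   mkFeaturePowerset { crateName = "some-crate"; features = [ "fuse" "tonic" ]; }
+  # yields one test build per feature subset, e.g. `no-features`,
+  # `with-features-fuse`, `with-features-tonic`, ...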
+
+  # Filters the given source, only keeping files related to the build, preventing unnecessary rebuilds.
+  # Includes src in the root, all other .rs files, as well as Cargo.toml.
+  # Additional files to be included can be specified in extraFileset.
+  filterRustCrateSrc =
+    { root # The original src
+    , extraFileset ? null # Additional filesets to include (e.g. fileFilter for proto files)
+    }:
+    lib.fileset.toSource {
+      inherit root;
+      fileset = (lib.fileset.intersection
+        (lib.fileset.fromSource root) # We build our final fileset from the original src
+        (lib.fileset.unions ([
+          (root + "/src")
+          (lib.fileset.fileFilter (f: f.hasExt "rs") root)
+          # We assume that every Rust crate will at a minimum have .rs files and a Cargo.toml
+          (lib.fileset.fileFilter (f: f.name == "Cargo.toml") root)
+        ] ++ lib.optional (extraFileset != null) extraFileset)));
+    };
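+
+  # Hedged usage sketch (paths illustrative):
+  #   src = filterRustCrateSrc {
+  #     root = ./.;
+  #     extraFileset = lib.fileset.fileFilter (f: f.hasExt "proto") ./.;
+  #   };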
+}
diff --git a/tvix/website/landing-en.md b/tvix/website/landing-en.md
index 61a011dee9..f677f20f2f 100644
--- a/tvix/website/landing-en.md
+++ b/tvix/website/landing-en.md
@@ -15,7 +15,7 @@ There are several projects within Tvix, such as:
 * `//tvix/castore` - subtree storage/transfer in a content-addressed fashion
 * `//tvix/cli` - preliminary REPL & CLI implementation for Tvix
 * `//tvix/eval` - an implementation of the Nix programming language
-* `//tvix/nar-bridge` - a HTTP webserver providing a Nix HTTP Binary Cache interface in front of a tvix-store
+* `//tvix/nar-bridge[-go]` - an HTTP webserver providing a Nix HTTP Binary Cache interface in front of a tvix-store
 * `//tvix/nix-compat` - a Rust library for compatibility with C++ Nix, features like encodings and hashing schemes and formats
 * `//tvix/serde` - a Rust library for using the Nix language for app configuration
 * `//tvix/store` - a "filesystem" linking Nix store paths and metadata with the content-addressed layer
diff --git a/users/Profpatsch/.hlint.yaml b/users/Profpatsch/.hlint.yaml
index f00f78c525..12b7c61b70 100644
--- a/users/Profpatsch/.hlint.yaml
+++ b/users/Profpatsch/.hlint.yaml
@@ -34,6 +34,8 @@
 - ignore: { name: Use tuple-section }
 - ignore: { name: Use forM_ }
 - ignore: { name: Functor law }
+- ignore: { name: Use maybe }
+
 # fst and snd are usually a code smell and should be explicit matches, _naming the ignored side.
 - ignore: { name: Use fst }
 - ignore: { name: Use snd }
diff --git a/users/Profpatsch/my-prelude/default.nix b/users/Profpatsch/my-prelude/default.nix
index e445115416..4bca8ea49f 100644
--- a/users/Profpatsch/my-prelude/default.nix
+++ b/users/Profpatsch/my-prelude/default.nix
@@ -7,6 +7,7 @@ pkgs.haskellPackages.mkDerivation {
   src = depot.users.Profpatsch.exactSource ./. [
     ./my-prelude.cabal
     ./src/Aeson.hs
+    ./src/Arg.hs
     ./src/AtLeast.hs
     ./src/MyPrelude.hs
     ./src/Test.hs
diff --git a/users/Profpatsch/my-prelude/my-prelude.cabal b/users/Profpatsch/my-prelude/my-prelude.cabal
index 95a8399f37..2f7882a526 100644
--- a/users/Profpatsch/my-prelude/my-prelude.cabal
+++ b/users/Profpatsch/my-prelude/my-prelude.cabal
@@ -59,6 +59,7 @@ library
     exposed-modules:
       MyPrelude
       Aeson
+      Arg
       AtLeast
       Test
       Postgres.Decoder
diff --git a/users/Profpatsch/my-prelude/src/Arg.hs b/users/Profpatsch/my-prelude/src/Arg.hs
new file mode 100644
index 0000000000..a6ffa90924
--- /dev/null
+++ b/users/Profpatsch/my-prelude/src/Arg.hs
@@ -0,0 +1,34 @@
+module Arg where
+
+import Data.String (IsString)
+import GHC.Exts (IsList)
+import GHC.TypeLits (Symbol)
+
+-- | Wrap a function argument into this helper to give it a better description for the caller without disturbing the callsite too much.
+--
+-- This has instances for IsString and Num, meaning that if the callsite is usually a string or number literal, it should Just Work.
+--
+-- e.g.
+--
+-- @
+-- myFoo :: Arg "used as the name in error message" Text -> IO ()
+-- myFoo (Arg name) = …
+-- @
+--
+-- The description will be displayed in the inferred type at the callsite.
+--
+-- Due to IsString you can call @myFoo@ like
+--
+-- @myFoo "name in error"@
+--
+-- This is mostly intended for literals; if you want to wrap arbitrary data, use @Label@.
+newtype Arg (description :: Symbol) a = Arg {unArg :: a}
+  deriving newtype
+    ( Show,
+      Eq,
+      IsString,
+      IsList,
+      Num,
+      Monoid,
+      Semigroup
+    )
diff --git a/users/Profpatsch/my-prelude/src/Postgres/MonadPostgres.hs b/users/Profpatsch/my-prelude/src/Postgres/MonadPostgres.hs
index f83a6d7fcf..2c9a48d134 100644
--- a/users/Profpatsch/my-prelude/src/Postgres/MonadPostgres.hs
+++ b/users/Profpatsch/my-prelude/src/Postgres/MonadPostgres.hs
@@ -5,13 +5,20 @@
 
 module Postgres.MonadPostgres where
 
+import Arg
 import AtLeast (AtLeast)
 import Control.Exception
+  ( Exception (displayException),
+    Handler (Handler),
+    catches,
+    try,
+  )
 import Control.Foldl qualified as Fold
 import Control.Monad.Logger.CallStack (MonadLogger, logDebug, logWarn)
 import Control.Monad.Reader (MonadReader (ask), ReaderT (..))
 import Control.Monad.Trans.Resource
 import Data.Aeson (FromJSON)
+import Data.ByteString qualified as ByteString
 import Data.Error.Tree
 import Data.HashMap.Strict qualified as HashMap
 import Data.Int (Int64)
@@ -28,8 +35,10 @@ import Database.PostgreSQL.Simple.FromRow qualified as PG
 import Database.PostgreSQL.Simple.ToField (ToField)
 import Database.PostgreSQL.Simple.ToRow (ToRow (toRow))
 import Database.PostgreSQL.Simple.Types (Query (..))
+import GHC.IO.Handle (Handle)
 import GHC.Records (getField)
 import Label
+import OpenTelemetry.Trace.Core (NewEvent (newEventName))
 import OpenTelemetry.Trace.Core qualified as Otel hiding (inSpan, inSpan')
 import OpenTelemetry.Trace.Monad qualified as Otel
 import PossehlAnalyticsPrelude
@@ -39,7 +48,9 @@ import Pretty (showPretty)
 import Seconds
 import System.Exit (ExitCode (..))
 import Tool
-import UnliftIO (MonadUnliftIO (withRunInIO))
+import UnliftIO (MonadUnliftIO (withRunInIO), bracket, hClose, mask_)
+import UnliftIO.Concurrent (forkIO)
+import UnliftIO.Process (ProcessHandle)
 import UnliftIO.Process qualified as Process
 import UnliftIO.Resource qualified as Resource
 import Prelude hiding (init, span)
@@ -357,7 +368,7 @@ handlePGException ::
   ( ToRow params,
     MonadUnliftIO m,
     MonadLogger m,
-    HasField "pgFormat" tools Tool
+    HasField "pgFormat" tools PgFormatPool
   ) =>
   tools ->
   Text ->
@@ -405,6 +416,104 @@ withPGTransaction connPool f =
     connPool
     (\conn -> Postgres.withTransaction conn (f conn))
 
+-- | `pg_formatter` is a Perl script that does not support any kind of streaming.
+-- Thus we initialize a pool with a bunch of these scripts running, waiting for input. This way we get somewhat fast SQL formatting.
+--
+-- Call `initPgFormatPool` to initialize, then use `runPgFormat` to format some SQL.
+data PgFormatPool = PgFormatPool
+  { pool :: Pool PgFormatProcess,
+    pgFormat :: Tool
+  }
+
+data PgFormatProcess = PgFormatProcess
+  { stdinHdl :: Handle,
+    stdoutHdl :: Handle,
+    stderrHdl :: Handle,
+    procHdl :: ProcessHandle,
+    startedAt :: Otel.Timestamp
+  }
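+
+-- Hedged usage sketch (illustrative only):
+--
+-- @
+-- bracket (initPgFormatPool tools) destroyPgFormatPool $ \pool -> do
+--   res <- runPgFormat pool "SELECT 1;"
+--   print res.formatted
+-- @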
+
+initPgFormatPool :: (HasField "pgFormat" tools Tool) => tools -> IO PgFormatPool
+initPgFormatPool tools = do
+  pool <-
+    Pool.newPool
+      ( Pool.defaultPoolConfig
+          (pgFormatStartCommandWaitForInput tools)
+          ( \pgFmt -> do
+              Process.terminateProcess pgFmt.procHdl
+              -- make sure we don’t leave any zombies
+              _ <- forkIO $ do
+                _ <- Process.waitForProcess pgFmt.procHdl
+                pure ()
+              pure ()
+          )
+          -- unused resource time (cache TTL), in seconds
+          100
+          -- number of resources
+          10
+      )
+
+  -- fill the pool with resources
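+  -- (each tryWithResource holds its resource while recursing, so this forces
+  -- the pool to create every slot once; when no slot is left it returns
+  -- Nothing, and the resources are released as the recursion unwinds)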
+  let go =
+        Pool.tryWithResource pool (\_ -> go) >>= \case
+          Nothing -> pure ()
+          Just () -> pure ()
+  _ <- go
+  pure (PgFormatPool {pool, pgFormat = tools.pgFormat})
+
+destroyPgFormatPool :: PgFormatPool -> IO ()
+destroyPgFormatPool pool = Pool.destroyAllResources pool.pool
+
+-- | Get the oldest resource from the pool, or stop if you find a resource that’s older than `cutoffPointMs`.
+takeOldestResource :: PgFormatPool -> Arg "cutoffPointMs" Integer -> IO (PgFormatProcess, Pool.LocalPool PgFormatProcess)
+takeOldestResource pool cutoffPointMs = do
+  now <- Otel.getTimestamp
+  mask_ $ do
+    a <- Pool.takeResource pool.pool
+    (putBack, res) <- go now [] a
+    -- make sure we don’t leak any resources we didn’t use in the end
+    for_ putBack $ \(x, xLocal) -> Pool.putResource xLocal x
+    pure res
+  where
+    mkMs ts = (ts & Otel.timestampNanoseconds & toInteger) `div` 1000_000
+    go now putBack a@(a', _) =
+      if abs (mkMs now - mkMs a'.startedAt) > cutoffPointMs.unArg
+        then pure (putBack, a)
+        else
+          Pool.tryTakeResource pool.pool >>= \case
+            Nothing -> pure (putBack, a)
+            Just b@(b', _) -> do
+              if a'.startedAt < b'.startedAt
+                then go now (b : putBack) a
+                else go now (a : putBack) b
+
+-- | Format the given SQL with pg_formatter. Will use the pool of already running formatters to speed up execution.
+runPgFormat :: PgFormatPool -> ByteString -> IO (T3 "exitCode" ExitCode "formatted" ByteString "stderr" ByteString)
+runPgFormat pool sqlStatement = do
+  bracket
+    (takeOldestResource pool 200)
+    ( \(a, localPool) -> do
+        -- we always destroy the resource, because the process exited
+        Pool.destroyResource pool.pool localPool a
+        -- create a new process to keep the pool “warm”
+        new <- pgFormatStartCommandWaitForInput pool
+        Pool.putResource localPool new
+    )
+    ( \(pgFmt, _localPool) -> do
+        ByteString.hPut pgFmt.stdinHdl sqlStatement
+        -- close stdin to make pg_formatter format (it exits …)
+        -- issue: https://github.com/darold/pgFormatter/issues/333
+        hClose pgFmt.stdinHdl
+        formatted <- ByteString.hGetContents pgFmt.stdoutHdl
+        errs <- ByteString.hGetContents pgFmt.stderrHdl
+        exitCode <- Process.waitForProcess pgFmt.procHdl
+        pure $
+          T3
+            (label @"exitCode" exitCode)
+            (label @"formatted" formatted)
+            (label @"stderr" errs)
+    )
+
 runPGTransactionImpl ::
   (MonadUnliftIO m) =>
   m (Pool Postgres.Connection) ->
@@ -418,7 +527,7 @@ runPGTransactionImpl zoom (Transaction transaction) = do
       unliftIO $ runReaderT transaction conn
 
 executeImpl ::
-  (ToRow params, MonadUnliftIO m, MonadLogger m, HasField "pgFormat" tools Tool, Otel.MonadTracer m) =>
+  (ToRow params, MonadUnliftIO m, MonadLogger m, HasField "pgFormat" tools PgFormatPool, Otel.MonadTracer m) =>
   m tools ->
   m DebugLogDatabaseQueries ->
   Query ->
@@ -436,7 +545,7 @@ executeImpl zoomTools zoomDebugLogDatabaseQueries qry params =
       >>= toNumberOfRowsAffected "executeImpl"
 
 executeImpl_ ::
-  (MonadUnliftIO m, MonadLogger m, HasField "pgFormat" tools Tool, Otel.MonadTracer m) =>
+  (MonadUnliftIO m, MonadLogger m, HasField "pgFormat" tools PgFormatPool, Otel.MonadTracer m) =>
   m tools ->
   m DebugLogDatabaseQueries ->
   Query ->
@@ -453,14 +562,14 @@ executeImpl_ zoomTools zoomDebugLogDatabaseQueries qry =
       >>= toNumberOfRowsAffected "executeImpl_"
 
 executeManyImpl ::
-  (ToRow params, MonadUnliftIO m, MonadLogger m, HasField "pgFormat" tools Tool, Otel.MonadTracer m) =>
+  (ToRow params, MonadUnliftIO m, MonadLogger m, HasField "pgFormat" tools PgFormatPool, Otel.MonadTracer m) =>
   m tools ->
   m DebugLogDatabaseQueries ->
   Query ->
   NonEmpty params ->
   Transaction m (Label "numberOfRowsAffected" Natural)
 executeManyImpl zoomTools zoomDebugLogDatabaseQueries qry params =
-  Otel.inSpan' "Postgres Query (execute)" Otel.defaultSpanArguments $ \span -> do
+  Otel.inSpan' "Postgres Query (executeMany)" Otel.defaultSpanArguments $ \span -> do
     tools <- lift @Transaction zoomTools
     logDatabaseQueries <- lift @Transaction zoomDebugLogDatabaseQueries
     traceQueryIfEnabled tools span logDatabaseQueries qry (HasMultiParams params)
@@ -480,7 +589,7 @@ toNumberOfRowsAffected functionName i64 =
     <&> label @"numberOfRowsAffected"
 
 executeManyReturningWithImpl ::
-  (ToRow params, MonadUnliftIO m, MonadLogger m, HasField "pgFormat" tools Tool, Otel.MonadTracer m) =>
+  (ToRow params, MonadUnliftIO m, MonadLogger m, HasField "pgFormat" tools PgFormatPool, Otel.MonadTracer m) =>
   m tools ->
   m DebugLogDatabaseQueries ->
   Query ->
@@ -489,7 +598,7 @@ executeManyReturningWithImpl ::
   Transaction m [r]
 {-# INLINE executeManyReturningWithImpl #-}
 executeManyReturningWithImpl zoomTools zoomDebugLogDatabaseQueries qry params (Decoder fromRow) = do
-  Otel.inSpan' "Postgres Query (execute)" Otel.defaultSpanArguments $ \span -> do
+  Otel.inSpan' "Postgres Query (executeManyReturning)" Otel.defaultSpanArguments $ \span -> do
     tools <- lift @Transaction zoomTools
     logDatabaseQueries <- lift @Transaction zoomDebugLogDatabaseQueries
     traceQueryIfEnabled tools span logDatabaseQueries qry (HasMultiParams params)
@@ -501,7 +610,7 @@ foldRowsWithAccImpl ::
   ( ToRow params,
     MonadUnliftIO m,
     MonadLogger m,
-    HasField "pgFormat" tools Tool,
+    HasField "pgFormat" tools PgFormatPool,
     Otel.MonadTracer m
   ) =>
   m tools ->
@@ -535,7 +644,7 @@ foldRowsWithAccImpl zoomTools zoomDebugLogDatabaseQueries qry params (Decoder ro
       )
 
 pgFormatQueryNoParams' ::
-  (MonadIO m, MonadLogger m, HasField "pgFormat" tools Tool) =>
+  (MonadIO m, MonadLogger m, HasField "pgFormat" tools PgFormatPool) =>
   tools ->
   Query ->
   Transaction m Text
@@ -571,7 +680,7 @@ queryWithImpl ::
   ( ToRow params,
     MonadUnliftIO m,
     MonadLogger m,
-    HasField "pgFormat" tools Tool,
+    HasField "pgFormat" tools PgFormatPool,
     Otel.MonadTracer m
   ) =>
   m tools ->
@@ -582,7 +691,7 @@ queryWithImpl ::
   Transaction m [r]
 {-# INLINE queryWithImpl #-}
 queryWithImpl zoomTools zoomDebugLogDatabaseQueries qry params (Decoder fromRow) = do
-  Otel.inSpan' "Postgres Query (execute)" Otel.defaultSpanArguments $ \span -> do
+  Otel.inSpan' "Postgres Query (queryWith)" Otel.defaultSpanArguments $ \span -> do
     tools <- lift @Transaction zoomTools
     logDatabaseQueries <- lift @Transaction zoomDebugLogDatabaseQueries
     traceQueryIfEnabled tools span logDatabaseQueries qry (HasSingleParam params)
@@ -593,7 +702,7 @@ queryWithImpl zoomTools zoomDebugLogDatabaseQueries qry params (Decoder fromRow)
 queryWithImpl_ ::
   ( MonadUnliftIO m,
     MonadLogger m,
-    HasField "pgFormat" tools Tool
+    HasField "pgFormat" tools PgFormatPool
   ) =>
   m tools ->
   Query ->
@@ -619,7 +728,7 @@ pgFormatQuery' ::
   ( MonadIO m,
     ToRow params,
     MonadLogger m,
-    HasField "pgFormat" tools Tool
+    HasField "pgFormat" tools PgFormatPool
   ) =>
   tools ->
   Query ->
@@ -633,7 +742,7 @@ pgFormatQueryMany' ::
   ( MonadIO m,
     ToRow params,
     MonadLogger m,
-    HasField "pgFormat" tools Tool
+    HasField "pgFormat" tools PgFormatPool
   ) =>
   tools ->
   Query ->
@@ -650,33 +759,58 @@ postgresToolsParser = label @"pgFormat" <$> readTool "pg_format"
 pgFormatQueryByteString ::
   ( MonadIO m,
     MonadLogger m,
-    HasField "pgFormat" tools Tool
+    HasField "pgFormat" tools PgFormatPool
   ) =>
   tools ->
   ByteString ->
   m Text
 pgFormatQueryByteString tools queryBytes = do
+  res <-
+    liftIO $
+      runPgFormat
+        tools.pgFormat
+        (queryBytes)
+  case res.exitCode of
+    ExitSuccess -> pure (res.formatted & bytesToTextUtf8Lenient)
+    ExitFailure status -> do
+      logWarn [fmt|pg_format failed with status {status} while formatting the query, using original query string. Is there a syntax error?|]
+      logDebug
+        ( prettyErrorTree
+            ( nestedMultiError
+                "pg_format output"
+                ( nestedError "stdout" (singleError (res.formatted & bytesToTextUtf8Lenient & newError))
+                    :| [(nestedError "stderr" (singleError (res.stderr & bytesToTextUtf8Lenient & newError)))]
+                )
+            )
+        )
+      pure (queryBytes & bytesToTextUtf8Lenient)
+
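+-- | Start a single pg_format process with stdin/stdout/stderr pipes, waiting
+-- for a query on stdin. Used to fill the 'PgFormatPool'.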
+pgFormatStartCommandWaitForInput ::
+  ( MonadIO m,
+    HasField "pgFormat" tools Tool,
+    MonadFail m
+  ) =>
+  tools ->
+  m PgFormatProcess
+pgFormatStartCommandWaitForInput tools = do
   do
-    (exitCode, stdout, stderr) <-
-      Process.readProcessWithExitCode
-        tools.pgFormat.toolPath
-        ["-"]
-        (queryBytes & bytesToTextUtf8Lenient & textToString)
-    case exitCode of
-      ExitSuccess -> pure (stdout & stringToText)
-      ExitFailure status -> do
-        logWarn [fmt|pg_format failed with status {status} while formatting the query, using original query string. Is there a syntax error?|]
-        logDebug
-          ( prettyErrorTree
-              ( nestedMultiError
-                  "pg_format output"
-                  ( nestedError "stdout" (singleError (stdout & stringToText & newError))
-                      :| [(nestedError "stderr" (singleError (stderr & stringToText & newError)))]
-                  )
-              )
+    startedAt <- Otel.getTimestamp
+    (Just stdinHdl, Just stdoutHdl, Just stderrHdl, procHdl) <-
+      Process.createProcess
+        ( ( Process.proc
+              tools.pgFormat.toolPath
+              [ "--no-rcfile",
+                "-"
+              ]
           )
-        logDebug [fmt|pg_format stdout: stderr|]
-        pure (queryBytes & bytesToTextUtf8Lenient)
+            { Process.std_in = Process.CreatePipe,
+              Process.std_out = Process.CreatePipe,
+              Process.std_err = Process.CreatePipe
+            }
+        )
+
+    pure PgFormatProcess {..}
 
 data DebugLogDatabaseQueries
   = -- | Do not log the database queries
@@ -697,7 +831,7 @@ traceQueryIfEnabled ::
   ( ToRow params,
     MonadUnliftIO m,
     MonadLogger m,
-    HasField "pgFormat" tools Tool,
+    HasField "pgFormat" tools PgFormatPool,
     Otel.MonadTracer m
   ) =>
   tools ->
@@ -708,20 +842,25 @@ traceQueryIfEnabled ::
   Transaction m ()
 traceQueryIfEnabled tools span logDatabaseQueries qry params = do
   -- In case we have query logging enabled, we want to do that
-  let formattedQuery = case params of
-        HasNoParams -> pgFormatQueryNoParams' tools qry
-        HasSingleParam p -> pgFormatQuery' tools qry p
-        HasMultiParams ps -> pgFormatQueryMany' tools qry ps
+  let formattedQuery = do
+        withEvent
+          span
+          "Query Format start"
+          "Query Format end"
+          $ case params of
+            HasNoParams -> pgFormatQueryNoParams' tools qry
+            HasSingleParam p -> pgFormatQuery' tools qry p
+            HasMultiParams ps -> pgFormatQueryMany' tools qry ps
+
   let doLog errs =
         Otel.addAttributes
           span
           $ HashMap.fromList
           $ ( ("_.postgres.query", Otel.toAttribute @Text errs.query)
                 : ( errs.explain
-                      & foldMap
-                        ( \ex ->
-                            [("_.postgres.explain", Otel.toAttribute @Text ex)]
-                        )
+                      & \case
+                        Nothing -> []
+                        Just ex -> [("_.postgres.explain", Otel.toAttribute @Text ex)]
                   )
             )
   let doExplain = do
@@ -750,6 +889,37 @@ traceQueryIfEnabled tools span logDatabaseQueries qry params = do
       ex <- doExplain
       doLog (T2 (label @"query" q) (label @"explain" (Just ex)))
 
+-- | Add a start and an end event to the span, and record how long the action between them took.
+--
+-- This is more lightweight than starting an extra span for timing things.
+withEvent :: (MonadIO f) => Otel.Span -> Text -> Text -> f b -> f b
+withEvent span start end act = do
+  let mkMs ts = (ts & Otel.timestampNanoseconds & toInteger) `div` 1000_000
+  s <- Otel.getTimestamp
+  Otel.addEvent
+    span
+    ( Otel.NewEvent
+        { newEventName = start,
+          newEventAttributes = mempty,
+          newEventTimestamp = Just s
+        }
+    )
+  res <- act
+  e <- Otel.getTimestamp
+  let tookMs =
+        (mkMs e - mkMs s)
+          -- should be small enough
+          & fromInteger @Int
+  Otel.addEvent
+    span
+    ( Otel.NewEvent
+        { newEventName = end,
+          newEventAttributes = HashMap.fromList [("took ms", Otel.toAttribute tookMs)],
+          newEventTimestamp = Just e
+        }
+    )
+  pure res
+
 instance (ToField t1) => ToRow (Label l1 t1) where
   toRow t2 = toRow $ PG.Only $ getField @l1 t2
 
diff --git a/users/Profpatsch/openlab-tools/src/OpenlabTools.hs b/users/Profpatsch/openlab-tools/src/OpenlabTools.hs
index 9fe51aba18..16f1b626ac 100644
--- a/users/Profpatsch/openlab-tools/src/OpenlabTools.hs
+++ b/users/Profpatsch/openlab-tools/src/OpenlabTools.hs
@@ -151,12 +151,12 @@ runApp = withTracer $ \tracer -> do
                                 )
                               ]
                         if
-                            -- If the last cache update is newer or equal to the requested version, we can tell the browser it’s fine
-                            | Just modifiedSince <- req'.ifModifiedSince,
-                              modifiedSince >= new.lastModified ->
-                                pure $ Wai.responseLBS Http.status304 cacheToHeaders ""
-                            | otherwise ->
-                                pure $ h cacheToHeaders (new.result & toLazyBytes)
+                          -- If the last cache update is newer or equal to the requested version, we can tell the browser it’s fine
+                          | Just modifiedSince <- req'.ifModifiedSince,
+                            modifiedSince >= new.lastModified ->
+                              pure $ Wai.responseLBS Http.status304 cacheToHeaders ""
+                          | otherwise ->
+                              pure $ h cacheToHeaders (new.result & toLazyBytes)
                     )
               }
           ]
@@ -198,7 +198,7 @@ runApp = withTracer $ \tracer -> do
         (Parse.maybe $ Parse.fieldParser parseHeaderTime)
         & rmap (fmap mkSecondTime)
 
-parseRequest :: (MonadThrow f, MonadIO f) => Otel.Span -> Parse from a -> from -> f a
+parseRequest :: (MonadThrow f) => Otel.Span -> Parse from a -> from -> f a
 parseRequest span parser req =
   Parse.runParse "Unable to parse the HTTP request" parser req
     & assertM span id
@@ -220,9 +220,9 @@ heatmap = do
       t
         & firstSection (match (Soup.TagOpen ("") [("class", "heatmap")]))
         >>= firstSection (match (Soup.TagOpen "table" []))
-        <&> getTable
-        <&> (<> htmlToTags [hsx|<figcaption>source: <a href={mapallSpaceOla} target="_blank">mapall.space</a></figcaption>|])
-        <&> wrapTagStream (T2 (label @"el" "figure") (label @"attrs" []))
+          <&> getTable
+          <&> (<> htmlToTags [hsx|<figcaption>source: <a href={mapallSpaceOla} target="_blank">mapall.space</a></figcaption>|])
+          <&> wrapTagStream (T2 (label @"el" "figure") (label @"attrs" []))
 
     -- get the table from opening tag to closing tag (allowing nested tables)
     getTable = go 0
@@ -310,8 +310,8 @@ runHandlers runApplication handlers = do
 inSpan :: (MonadUnliftIO m, Otel.MonadTracer m) => Text -> m a -> m a
 inSpan name = Otel.inSpan name Otel.defaultSpanArguments
 
-inSpan' :: (MonadUnliftIO m, Otel.MonadTracer m) => Text -> (Otel.Span -> m a) -> m a
--- inSpan' name =  Otel.inSpan' name Otel.defaultSpanArguments
+inSpan' :: Text -> (Otel.Span -> m a) -> m a
+-- inSpan' name = Otel.inSpan' name Otel.defaultSpanArguments
 inSpan' _name act = act (error "todo telemetry disabled")
 
 zipT2 ::
@@ -379,17 +379,17 @@ httpJson opts span parser req = do
                   <&> Wai.parseContentType
                   <&> (\(ct, _mimeAttributes) -> ct)
           if
-              | statusCode == 200,
-                Just ct <- contentType,
-                ct == opts'.contentType ->
-                  Right $ (resp & Http.responseBody)
-              | statusCode == 200,
-                Just otherType <- contentType ->
-                  Left [fmt|Server returned a non-json body, with content-type "{otherType}"|]
-              | statusCode == 200,
-                Nothing <- contentType ->
-                  Left [fmt|Server returned a body with unspecified content type|]
-              | code <- statusCode -> Left [fmt|Server returned an non-200 error code, code {code}: {resp & showPretty}|]
+            | statusCode == 200,
+              Just ct <- contentType,
+              ct == opts'.contentType ->
+                Right $ (resp & Http.responseBody)
+            | statusCode == 200,
+              Just otherType <- contentType ->
+                Left [fmt|Server returned a non-json body, with content-type "{otherType}"|]
+            | statusCode == 200,
+              Nothing <- contentType ->
+                Left [fmt|Server returned a body with unspecified content type|]
+            | code <- statusCode -> Left [fmt|Server returned an non-200 error code, code {code}: {resp & showPretty}|]
       )
     >>= assertM
       span
@@ -398,7 +398,7 @@ httpJson opts span parser req = do
             & first (Json.parseErrorTree "could not parse redacted response")
       )
 
-assertM :: (MonadThrow f, MonadIO f) => Otel.Span -> (t -> Either ErrorTree a) -> t -> f a
+assertM :: (MonadThrow f) => Otel.Span -> (t -> Either ErrorTree a) -> t -> f a
 assertM span f v = case f v of
   Right a -> pure a
   Left err -> appThrowTree span err
@@ -419,7 +419,7 @@ data Cache a = Cache
     lastModified :: !SecondTime,
     result :: !a
   }
-  deriving (Show)
+  deriving stock (Show)
 
 newCache :: Text -> a -> IO (TVar (Cache a))
 newCache name result = do
@@ -528,8 +528,8 @@ recordException span dat = liftIO $ do
         ..
       }
 
-appThrowTree :: (MonadThrow m, MonadIO m) => Otel.Span -> ErrorTree -> m a
-appThrowTree span exc = do
+appThrowTree :: (MonadThrow m) => Otel.Span -> ErrorTree -> m a
+appThrowTree _span exc = do
   let msg = prettyErrorTree exc
   -- recordException
   --   span
@@ -539,7 +539,7 @@ appThrowTree span exc = do
   --   )
   throwM $ AppException msg
 
-orAppThrowTree :: (MonadThrow m, MonadIO m) => Otel.Span -> Either ErrorTree a -> m a
+orAppThrowTree :: (MonadThrow m) => Otel.Span -> Either ErrorTree a -> m a
 orAppThrowTree span = \case
   Left err -> appThrowTree span err
   Right a -> pure a
diff --git a/users/Profpatsch/shell.nix b/users/Profpatsch/shell.nix
index b5095d476f..ec3326fe86 100644
--- a/users/Profpatsch/shell.nix
+++ b/users/Profpatsch/shell.nix
@@ -45,7 +45,7 @@ pkgs.mkShell {
       h.unix
       h.tagsoup
       h.attoparsec
-      h.iCalendar
+      # h.iCalendar
       h.case-insensitive
       h.hscolour
       h.nicify-lib
diff --git a/users/Profpatsch/whatcd-resolver/src/AppT.hs b/users/Profpatsch/whatcd-resolver/src/AppT.hs
index 7afd430745..3232004122 100644
--- a/users/Profpatsch/whatcd-resolver/src/AppT.hs
+++ b/users/Profpatsch/whatcd-resolver/src/AppT.hs
@@ -19,16 +19,15 @@ import OpenTelemetry.Trace.Monad qualified as Otel
 import PossehlAnalyticsPrelude
 import Postgres.MonadPostgres
 import System.IO qualified as IO
-import Tool (Tool)
 import UnliftIO
 import Prelude hiding (span)
 
 data Context = Context
   { config :: Label "logDatabaseQueries" DebugLogDatabaseQueries,
     tracer :: Otel.Tracer,
-    pgFormat :: Tool,
+    pgFormat :: PgFormatPool,
     pgConnPool :: Pool Postgres.Connection,
-    transmissionSessionId :: MVar ByteString
+    transmissionSessionId :: IORef (Maybe ByteString)
   }
 
 newtype AppT m a = AppT {unAppT :: ReaderT Context m a}
diff --git a/users/Profpatsch/whatcd-resolver/src/Http.hs b/users/Profpatsch/whatcd-resolver/src/Http.hs
index 4fdbb306ad..487d55c21d 100644
--- a/users/Profpatsch/whatcd-resolver/src/Http.hs
+++ b/users/Profpatsch/whatcd-resolver/src/Http.hs
@@ -16,16 +16,14 @@ where
 import AppT
 import Data.CaseInsensitive (CI (original))
 import Data.Char qualified as Char
-import Data.Int (Int64)
 import Data.List qualified as List
 import Data.Text qualified as Text
-import Data.Text.Lazy qualified as Lazy.Text
 import Data.Text.Punycode qualified as Punycode
 import Json.Enc qualified as Enc
 import MyPrelude
 import Network.HTTP.Client
 import Network.HTTP.Simple
-import OpenTelemetry.Attributes qualified as Otel
+import Network.HTTP.Types.Status (Status (..))
 import Optional
 import Prelude hiding (span)
 
@@ -55,20 +53,24 @@ doRequestJson ::
   Enc.Enc ->
   m (Response ByteString)
 doRequestJson opts val = inSpan' "HTTP Request (JSON)" $ \span -> do
-  let x = requestToXhCommandLine opts val
-  let attrs = [100, 200 .. fromIntegral @Int @Int64 (x & Text.length)]
-  for_ attrs $ \n -> do
-    addAttribute span [fmt|request.xh.{n}|] (Lazy.Text.repeat 'x' & Lazy.Text.take n & toStrict & Otel.TextAttribute)
   addAttribute span "request.xh" (requestToXhCommandLine opts val)
-  defaultRequest {secure = not (opts & optsUsePlainHttp)}
-    & setRequestHost (opts & optsHost)
-    & setRequestPort (opts & optsPort)
-    -- TODO: is this automatically escaped by the library?
-    & setRequestPath (opts & optsPath)
-    & setRequestHeaders (opts & optsHeaders)
-    & setRequestMethod opts.method
-    & setRequestBodyLBS (Enc.encToBytesUtf8Lazy val)
-    & httpBS
+  resp <-
+    defaultRequest {secure = not (opts & optsUsePlainHttp)}
+      & setRequestHost (opts & optsHost)
+      & setRequestPort (opts & optsPort)
+      -- TODO: is this automatically escaped by the library?
+      & setRequestPath (opts & optsPath)
+      & setRequestHeaders (opts & optsHeaders)
+      & setRequestMethod opts.method
+      & setRequestBodyLBS (Enc.encToBytesUtf8Lazy val)
+      & httpBS
+  let code = resp & getResponseStatus & (.statusCode)
+  let msg = resp & getResponseStatus & (.statusMessage) & bytesToTextUtf8Lenient
+  addAttribute
+    span
+    "request.response.status"
+    ([fmt|{code} {msg}|] :: Text)
+  pure resp
 
 optsHost :: RequestOptions -> ByteString
 optsHost opts =
diff --git a/users/Profpatsch/whatcd-resolver/src/JsonLd.hs b/users/Profpatsch/whatcd-resolver/src/JsonLd.hs
index 16b1ab991b..1a021b706c 100644
--- a/users/Profpatsch/whatcd-resolver/src/JsonLd.hs
+++ b/users/Profpatsch/whatcd-resolver/src/JsonLd.hs
@@ -3,7 +3,6 @@
 module JsonLd where
 
 import AppT
-import Control.Monad.Reader
 import Data.Aeson qualified as Json
 import Data.Aeson.BetterErrors qualified as Json
 import Data.ByteString.Builder qualified as Builder
diff --git a/users/Profpatsch/whatcd-resolver/src/Redacted.hs b/users/Profpatsch/whatcd-resolver/src/Redacted.hs
index 4369c18408..c0ad9071af 100644
--- a/users/Profpatsch/whatcd-resolver/src/Redacted.hs
+++ b/users/Profpatsch/whatcd-resolver/src/Redacted.hs
@@ -3,6 +3,7 @@
 module Redacted where
 
 import AppT
+import Arg
 import Control.Monad.Logger.CallStack
 import Control.Monad.Reader
 import Data.Aeson qualified as Json
@@ -67,12 +68,8 @@ redactedGetTorrentFile dat = inSpan' "Redacted Get Torrent File" $ \span -> do
       )
   httpTorrent span req
 
--- fix
---   ( \io -> do
---       logInfo "delay"
---       liftIO $ threadDelay 10_000_000
---       io
---   )
+mkRedactedTorrentLink :: Arg "torrentGroupId" Int -> Text
+mkRedactedTorrentLink torrentId = [fmt|https://redacted.ch/torrents.php?id={torrentId.unArg}|]
 
 exampleSearch :: (MonadThrow m, MonadLogger m, MonadPostgres m, MonadOtel m) => m (Transaction m ())
 exampleSearch = do
@@ -360,11 +357,17 @@ data TorrentData transmissionInfo = TorrentData
   { groupId :: Int,
     torrentId :: Int,
     seedingWeight :: Int,
+    artists :: [T2 "artistId" Int "artistName" Text],
     torrentJson :: Json.Value,
-    torrentGroupJson :: T3 "artist" Text "groupName" Text "groupYear" Int,
+    torrentGroupJson :: TorrentGroupJson,
     torrentStatus :: TorrentStatus transmissionInfo
   }
 
+data TorrentGroupJson = TorrentGroupJson
+  { groupName :: Text,
+    groupYear :: Int
+  }
+
 data TorrentStatus transmissionInfo
   = NoTorrentFileYet
   | NotInTransmissionYet
@@ -381,38 +384,70 @@ getTorrentById dat = do
     (Dec.json Json.asValue)
     >>= ensureSingleRow
 
+data GetBestTorrentsFilter = GetBestTorrentsFilter
+  { onlyDownloaded :: Bool,
+    onlyArtist :: Maybe (Label "artistRedactedId" Natural)
+  }
+
 -- | Find the best torrent for each torrent group (based on the seeding_weight)
-getBestTorrents :: (MonadPostgres m) => Transaction m [TorrentData ()]
-getBestTorrents = do
+getBestTorrents ::
+  (MonadPostgres m) =>
+  GetBestTorrentsFilter ->
+  Transaction m [TorrentData ()]
+getBestTorrents opts = do
   queryWith
     [sql|
-      SELECT * FROM (
-        SELECT DISTINCT ON (group_id)
-          tg.group_id,
-          t.torrent_id,
-          seeding_weight,
-          t.full_json_result AS torrent_json,
-          tg.full_json_result AS torrent_group_json,
-          t.torrent_file IS NOT NULL,
-          t.transmission_torrent_hash
-        FROM redacted.torrents t
-        JOIN redacted.torrent_groups tg ON tg.id = t.torrent_group
-        ORDER BY group_id, seeding_weight DESC
-      ) as _
+      WITH filtered_torrents AS (
+        SELECT DISTINCT ON (torrent_group)
+          id
+        FROM
+          redacted.torrents
+        WHERE
+          -- onlyDownloaded
+          ((NOT ?::bool) OR torrent_file IS NOT NULL)
+          -- filter by artist id (the first placeholder is TRUE when no artist filter is given)
+          AND
+          (?::bool OR (to_jsonb(?::int) <@ (jsonb_path_query_array(full_json_result, '$.artists[*].id'))))
+        ORDER BY torrent_group, seeding_weight DESC
+      )
+      SELECT
+        tg.group_id,
+        t.torrent_id,
+        t.seeding_weight,
+        t.full_json_result AS torrent_json,
+        tg.full_json_result AS torrent_group_json,
+        t.torrent_file IS NOT NULL AS has_torrent_file,
+        t.transmission_torrent_hash
+      FROM filtered_torrents f
+      JOIN redacted.torrents t ON t.id = f.id
+      JOIN redacted.torrent_groups tg ON tg.id = t.torrent_group
       ORDER BY seeding_weight DESC
     |]
-    ()
+    ( do
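+        -- when no artist filter is given, pass TRUE for the "skip filter" flag
+        -- and a dummy id of 0 (the id then doesn't matter)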
+        let (onlyArtistB, onlyArtistId) = case opts.onlyArtist of
+              Nothing -> (True, 0)
+              Just a -> (False, a.artistRedactedId)
+        ( opts.onlyDownloaded :: Bool,
+          onlyArtistB :: Bool,
+          onlyArtistId & fromIntegral @Natural @Int
+          )
+    )
     ( do
         groupId <- Dec.fromField @Int
         torrentId <- Dec.fromField @Int
         seedingWeight <- Dec.fromField @Int
-        torrentJson <- Dec.json Json.asValue
+        (torrentJson, artists) <- Dec.json $ do
+          val <- Json.asValue
+          artists <- Json.keyOrDefault "artists" [] $ Json.eachInArray $ do
+            id_ <- Json.keyLabel @"artistId" "id" (Json.asIntegral @_ @Int)
+            name <- Json.keyLabel @"artistName" "name" Json.asText
+            pure $ T2 id_ name
+          pure (val, artists)
         torrentGroupJson <-
           ( Dec.json $ do
-              artist <- Json.keyLabel @"artist" "artist" Json.asText
-              groupName <- Json.keyLabel @"groupName" "groupName" Json.asText
-              groupYear <- Json.keyLabel @"groupYear" "groupYear" (Json.asIntegral @_ @Int)
-              pure $ T3 artist groupName groupYear
+              groupName <- Json.key "groupName" Json.asText
+              groupYear <- Json.key "groupYear" (Json.asIntegral @_ @Int)
+              pure $ TorrentGroupJson {..}
             )
         hasTorrentFile <- Dec.fromField @Bool
         transmissionTorrentHash <-
diff --git a/users/Profpatsch/whatcd-resolver/src/Transmission.hs b/users/Profpatsch/whatcd-resolver/src/Transmission.hs
index 66dbeb9ce7..acbab00162 100644
--- a/users/Profpatsch/whatcd-resolver/src/Transmission.hs
+++ b/users/Profpatsch/whatcd-resolver/src/Transmission.hs
@@ -25,6 +25,7 @@ import Json.Enc qualified as Enc
 import Label
 import MyPrelude
 import Network.HTTP.Types
+import OpenTelemetry.Attributes (ToAttribute (toAttribute))
 import OpenTelemetry.Trace qualified as Otel hiding (getTracer, inSpan, inSpan')
 import Optional
 import Postgres.MonadPostgres
@@ -226,7 +227,7 @@ doTransmissionRequest ::
   (TransmissionRequest, Json.Parse Error output) ->
   m (TransmissionResponse output)
 doTransmissionRequest span dat (req, parser) = do
-  sessionId <- getTransmissionId
+  sessionId <- getCurrentTransmissionSessionId
   let textArg t = (Enc.text t, Otel.toAttribute @Text t)
   let encArg enc = (enc, Otel.toAttribute @Text $ enc & Enc.encToTextPretty)
   let intArg i = (Enc.int i, Otel.toAttribute @Int i)
@@ -257,7 +258,7 @@ doTransmissionRequest span dat (req, parser) = do
       (body <&> second fst & Enc.object)
   -- Implement the CSRF protection thingy
   case resp & Http.getResponseStatus & (.statusCode) of
-    409 -> do
+    409 -> inSpan' "New Transmission Session ID" $ \span' -> do
       tid <-
         resp
           & Http.getResponseHeader "X-Transmission-Session-Id"
@@ -266,9 +267,21 @@ doTransmissionRequest span dat (req, parser) = do
           & unwrapIOError
           & liftIO
           <&> NonEmpty.head
-      setTransmissionId tid
+
+      addAttributes span' $
+        HashMap.fromList
+          [ ("transmission.new_session_id", tid & bytesToTextUtf8Lenient & toAttribute),
+            ("transmission.old_session_id", sessionId <&> bytesToTextUtf8Lenient & fromMaybe "<none yet>" & toAttribute)
+          ]
+
+      updateTransmissionSessionId tid
+
       doTransmissionRequest span dat (req, parser)
-    200 ->
+    200 -> do
+      addAttributes span $
+        HashMap.fromList
+          [ ("transmission.valid_session_id", sessionId <&> bytesToTextUtf8Lenient & fromMaybe "<none yet>" & toAttribute)
+          ]
       resp
         & Http.getResponseBody
         & Json.parseStrict
@@ -296,11 +309,11 @@ doTransmissionRequest span dat (req, parser) = do
     _ -> liftIO $ unwrapIOError $ Left [fmt|Non-200 response: {showPretty resp}|]
 
 class MonadTransmission m where
-  getTransmissionId :: m (Maybe ByteString)
-  setTransmissionId :: ByteString -> m ()
+  getCurrentTransmissionSessionId :: m (Maybe ByteString)
+  updateTransmissionSessionId :: ByteString -> m ()
 
 instance (MonadIO m) => MonadTransmission (AppT m) where
-  getTransmissionId = AppT (asks (.transmissionSessionId)) >>= tryTakeMVar
-  setTransmissionId t = do
+  getCurrentTransmissionSessionId = AppT (asks (.transmissionSessionId)) >>= readIORef
+  updateTransmissionSessionId t = do
     var <- AppT $ asks (.transmissionSessionId)
-    putMVar var t
+    writeIORef var (Just t)
diff --git a/users/Profpatsch/whatcd-resolver/src/WhatcdResolver.hs b/users/Profpatsch/whatcd-resolver/src/WhatcdResolver.hs
index f1902bac8c..73a9dccb12 100644
--- a/users/Profpatsch/whatcd-resolver/src/WhatcdResolver.hs
+++ b/users/Profpatsch/whatcd-resolver/src/WhatcdResolver.hs
@@ -3,6 +3,7 @@
 module WhatcdResolver where
 
 import AppT
+import Arg
 import Control.Category qualified as Cat
 import Control.Monad.Catch.Pure (runCatch)
 import Control.Monad.Logger.CallStack
@@ -10,6 +11,7 @@ import Control.Monad.Reader
 import Data.Aeson qualified as Json
 import Data.Aeson.BetterErrors qualified as Json
 import Data.Aeson.KeyMap qualified as KeyMap
+import Data.Error.Tree (prettyErrorTree)
 import Data.HashMap.Strict qualified as HashMap
 import Data.List qualified as List
 import Data.Map.Strict qualified as Map
@@ -23,6 +25,7 @@ import FieldParser (FieldParser, FieldParser' (..))
 import FieldParser qualified as Field
 import Html qualified
 import IHP.HSX.QQ (hsx)
+import IHP.HSX.ToHtml (ToHtml)
 import Json qualified
 import Json.Enc (Enc)
 import Json.Enc qualified as Enc
@@ -36,7 +39,6 @@ import Network.HTTP.Types
 import Network.HTTP.Types qualified as Http
 import Network.URI (URI)
 import Network.URI qualified
-import Network.URI qualified as URI
 import Network.Wai (ResponseReceived)
 import Network.Wai qualified as Wai
 import Network.Wai.Handler.Warp qualified as Warp
@@ -55,7 +57,6 @@ import System.Directory qualified as Xdg
 import System.Environment qualified as Env
 import System.FilePath ((</>))
 import Text.Blaze.Html (Html)
-import Text.Blaze.Html.Renderer.Pretty qualified as Html.Pretty
 import Text.Blaze.Html.Renderer.Utf8 qualified as Html
 import Text.Blaze.Html5 qualified as Html
 import Tool (readTool, readTools)
@@ -77,7 +78,6 @@ main =
 
 htmlUi :: AppT IO ()
 htmlUi = do
-  let debug = True
   uniqueRunId <-
     runTransaction $
       querySingleRowWith
@@ -87,13 +87,13 @@ htmlUi = do
         ()
         (Dec.fromField @Text)
 
-  withRunInIO $ \runInIO -> Warp.run 9093 $ \req respond -> do
+  withRunInIO $ \runInIO -> Warp.run 9093 $ \req respondOrig -> do
     let catchAppException act =
           try act >>= \case
             Right a -> pure a
             Left (AppException err) -> do
               runInIO (logError err)
-              respond (Wai.responseLBS Http.status500 [] "")
+              respondOrig (Wai.responseLBS Http.status500 [] "")
 
     catchAppException $ do
       let mp span parser =
@@ -108,9 +108,6 @@ htmlUi = do
               ( do
                   label @"torrentId" <$> Multipart.field "torrent-id" ((Field.utf8 >>> Field.signedDecimal >>> Field.bounded @Int "int"))
               )
-      let parseQueryArgs span parser =
-            Parse.runParse "Unable to find the right request query arguments" (lmap Wai.queryString parser) req
-              & assertM span id
 
       let parseQueryArgsNewSpan spanName parser =
             Parse.runParse "Unable to find the right request query arguments" (lmap Wai.queryString parser) req
@@ -119,9 +116,9 @@ htmlUi = do
       let handlers :: Handlers (AppT IO)
           handlers respond =
             Map.fromList
-              [ ("", respond.h (mainHtml uniqueRunId)),
+              [ ("", respond.html (mainHtml uniqueRunId)),
                 ( "snips/redacted/search",
-                  respond.h $
+                  respond.html $
                     \span -> do
                       dat <-
                         mp
@@ -132,12 +129,12 @@ htmlUi = do
                       snipsRedactedSearch dat
                 ),
                 ( "snips/redacted/torrentDataJson",
-                  respond.h $ \span -> do
+                  respond.html $ \span -> do
                     dat <- torrentIdMp span
                     Html.mkVal <$> (runTransaction $ getTorrentById dat)
                 ),
                 ( "snips/redacted/getTorrentFile",
-                  respond.h $ \span -> do
+                  respond.html $ \span -> do
                     dat <- torrentIdMp span
                     runTransaction $ do
                       inserted <- redactedGetTorrentFileAndInsert dat
@@ -157,7 +154,7 @@ htmlUi = do
                 ),
                 -- TODO: this is bad duplication??
                 ( "snips/redacted/startTorrentFile",
-                  respond.h $ \span -> do
+                  respond.html $ \span -> do
                     dat <- torrentIdMp span
                     runTransaction $ do
                       file <-
@@ -180,7 +177,7 @@ htmlUi = do
                           "Starting"
                 ),
                 ( "snips/transmission/getTorrentState",
-                  respond.h $ \span -> do
+                  respond.html $ \span -> do
                     dat <- mp span $ label @"torrentHash" <$> Multipart.field "torrent-hash" Field.utf8
                     status <-
                       doTransmissionRequest'
@@ -199,17 +196,29 @@ htmlUi = do
                         Just _torrent -> [hsx|Running|]
                 ),
                 ( "snips/jsonld/render",
-                  respond.h $ \span -> do
-                    qry <-
-                      parseQueryArgs
-                        span
-                        ( label @"target"
-                            <$> ( (singleQueryArgument "target" Field.utf8 >>> textToURI)
-                                    & Parse.andParse uriToHttpClientRequest
-                                )
-                        )
-                    jsonld <- httpGetJsonLd (qry.target)
-                    pure $ renderJsonld jsonld
+                  do
+                    let HandlerResponses {htmlWithQueryArgs} = respond
+                    htmlWithQueryArgs
+                      ( label @"target"
+                          <$> ( (singleQueryArgument "target" Field.utf8 >>> textToURI)
+                                  & Parse.andParse uriToHttpClientRequest
+                              )
+                      )
+                      ( \qry _span -> do
+                          jsonld <- httpGetJsonLd (qry.target)
+                          pure $ renderJsonld jsonld
+                      )
+                ),
+                ( "artist",
+                  do
+                    let HandlerResponses {htmlWithQueryArgs} = respond
+
+                    htmlWithQueryArgs
+                      ( label @"artistRedactedId"
+                          <$> (singleQueryArgument "redacted_id" (Field.utf8 >>> Field.decimalNatural))
+                      )
+                      $ \qry _span -> do
+                        artistPage qry
                 ),
                 ( "autorefresh",
                   respond.plain $ do
@@ -233,30 +242,62 @@ htmlUi = do
               ]
       runInIO $
         runHandlers
-          debug
-          (\respond -> respond.h $ (mainHtml uniqueRunId))
+          (\respond -> respond.html $ (mainHtml uniqueRunId))
           handlers
           req
-          respond
+          respondOrig
   where
     everySecond :: Text -> Enc -> Html -> Html
     everySecond call extraData innerHtml = [hsx|<div hx-trigger="every 1s" hx-swap="outerHTML" hx-post={call} hx-vals={Enc.encToBytesUtf8 extraData}>{innerHtml}</div>|]
 
     mainHtml :: Text -> Otel.Span -> AppT IO Html
     mainHtml uniqueRunId _span = runTransaction $ do
-      jsonld <-
-        httpGetJsonLd
-          ( URI.parseURI "https://musicbrainz.org/work/92000fd4-d304-406d-aeb4-6bdbeed318ec" & annotate "not an URI" & unwrapError,
-            "https://musicbrainz.org/work/92000fd4-d304-406d-aeb4-6bdbeed318ec"
-          )
-          <&> renderJsonld
-      bestTorrentsTable <- getBestTorrentsTable
+      -- jsonld <-
+      --   httpGetJsonLd
+      --     ( URI.parseURI "https://musicbrainz.org/work/92000fd4-d304-406d-aeb4-6bdbeed318ec" & annotate "not an URI" & unwrapError,
+      --       "https://musicbrainz.org/work/92000fd4-d304-406d-aeb4-6bdbeed318ec"
+      --     )
+      --     <&> renderJsonld
+      bestTorrentsTable <- getBestTorrentsTable Nothing
       -- transmissionTorrentsTable <- lift @Transaction getTransmissionTorrentsTable
       pure $
-        Html.docTypeHtml
+        htmlPageChrome
+          "whatcd-resolver"
           [hsx|
+            <form
+              hx-post="/snips/redacted/search"
+              hx-target="#redacted-search-results">
+              <label for="redacted-search">Redacted Search</label>
+              <input
+                id="redacted-search"
+                type="text"
+                name="redacted-search" />
+              <button type="submit" hx-disabled-elt="this">Search</button>
+              <div class="htmx-indicator">Search running!</div>
+            </form>
+            <div id="redacted-search-results">
+              {bestTorrentsTable}
+            </div>
+            <!-- refresh the page if the uniqueRunId is different -->
+            <input
+                hidden
+                type="text"
+                id="autorefresh"
+                name="hasItBeenRestarted"
+                value={uniqueRunId}
+                hx-get="/autorefresh"
+                hx-trigger="every 5s"
+                hx-swap="none"
+            />
+        |]
+
+htmlPageChrome :: (ToHtml a) => Text -> a -> Html
+htmlPageChrome title body =
+  Html.docTypeHtml $
+    [hsx|
       <head>
-        <title>whatcd-resolver</title>
+        <!-- TODO: set nice page title for each page -->
+        <title>{title}</title>
         <meta charset="utf-8">
         <meta name="viewport" content="width=device-width, initial-scale=1">
         <link href="https://cdn.jsdelivr.net/npm/bootstrap@5.3.0/dist/css/bootstrap.min.css" rel="stylesheet" integrity="sha384-9ndCyUaIbzAi2FUVXJi0CjmCapSmO7SnpJef0486qhLnuZ2cdeRhO02iuK6FUUVM" crossorigin="anonymous">
@@ -271,73 +312,96 @@ htmlUi = do
         </style>
       </head>
       <body>
-        {jsonld}
-        <form
-          hx-post="/snips/redacted/search"
-          hx-target="#redacted-search-results">
-          <label for="redacted-search">Redacted Search</label>
-          <input
-            id="redacted-search"
-            type="text"
-            name="redacted-search" />
-          <button type="submit" hx-disabled-elt="this">Search</button>
-          <div class="htmx-indicator">Search running!</div>
-        </form>
-        <div id="redacted-search-results">
-          {bestTorrentsTable}
-        </div>
-        <!-- refresh the page if the uniqueRunId is different -->
-        <input
-             hidden
-             type="text"
-             id="autorefresh"
-             name="hasItBeenRestarted"
-             value={uniqueRunId}
-             hx-get="/autorefresh"
-             hx-trigger="every 5s"
-             hx-swap="none"
-        />
+        {body}
       </body>
     |]
 
+artistPage ::
+  ( HasField "artistRedactedId" dat Natural,
+    MonadPostgres m,
+    MonadOtel m,
+    MonadLogger m,
+    MonadThrow m,
+    MonadTransmission m
+  ) =>
+  dat ->
+  m Html
+artistPage dat = runTransaction $ do
+  fresh <- getBestTorrentsData (Just $ getLabel @"artistRedactedId" dat)
+  let artistName = fresh & findMaybe (\t -> t.artists & findMaybe (\a -> if a.artistId == (dat.artistRedactedId & fromIntegral @Natural @Int) then Just a.artistName else Nothing))
+  let torrents = mkBestTorrentsTable fresh
+  pure $
+    htmlPageChrome
+      ( case artistName of
+          Nothing -> "whatcd-resolver"
+          Just a -> [fmt|{a} - Artist Page - whatcd-resolver|]
+      )
+      [hsx|
+        Artist ID: {dat.artistRedactedId}
+
+        {torrents}
+      |]
+
 type Handlers m = HandlerResponses m -> Map Text (m ResponseReceived)
 
-type HandlerResponses m = T2 "h" ((Otel.Span -> m Html) -> m ResponseReceived) "plain" (m Wai.Response -> m ResponseReceived)
+data HandlerResponses m = HandlerResponses
+  { -- | render html
+    html :: (Otel.Span -> m Html) -> m ResponseReceived,
+    -- | render html after parsing some query arguments
+    htmlWithQueryArgs :: forall a. (Parse Query a -> (a -> Otel.Span -> m Html) -> m ResponseReceived),
+    -- | render a plain wai response
+    plain :: (m Wai.Response -> m ResponseReceived)
+  }
 
 runHandlers ::
   (MonadOtel m) =>
-  Bool ->
   (HandlerResponses m -> m ResponseReceived) ->
   (HandlerResponses m -> Map Text (m ResponseReceived)) ->
   Wai.Request ->
   (Wai.Response -> IO ResponseReceived) ->
   m ResponseReceived
-runHandlers debug defaultHandler handlers req respond = withRunInIO $ \runInIO -> do
-  let renderHtml =
-        if debug
-          then Html.Pretty.renderHtml >>> stringToText >>> textToBytesUtf8 >>> toLazyBytes
-          else Html.renderHtml
-  let hh route act =
+runHandlers defaultHandler handlers req respond = withRunInIO $ \runInIO -> do
+  let path = req & Wai.pathInfo & Text.intercalate "/"
+  let html act =
         Otel.inSpan'
-          [fmt|Route {route }|]
+          [fmt|Route /{path}|]
           ( Otel.defaultSpanArguments
               { Otel.attributes =
                   HashMap.fromList
-                    [ ("server.path", Otel.toAttribute @Text route)
+                    [ ("_.server.path", Otel.toAttribute @Text path),
+                      ("_.server.query_args", Otel.toAttribute @Text (req.rawQueryString & bytesToTextUtf8Lenient))
                     ]
               }
           )
           ( \span -> do
-              res <- act span
-              liftIO $ respond . Wai.responseLBS Http.ok200 ([("Content-Type", "text/html")] <> res.extraHeaders) . renderHtml $ res.html
+              res <- act span <&> (\h -> T2 (label @"html" h) (label @"extraHeaders" []))
+              liftIO $ respond . Wai.responseLBS Http.ok200 ([("Content-Type", "text/html")] <> res.extraHeaders) . Html.renderHtml $ res.html
           )
-  let h route act = hh route (\span -> act span <&> (\html -> T2 (label @"html" html) (label @"extraHeaders" [])))
 
-  let path = (req & Wai.pathInfo & Text.intercalate "/")
   let handlerResponses =
-        ( T2
-            (label @"h" (h path))
-            (label @"plain" (\m -> liftIO $ runInIO m >>= respond))
+        ( HandlerResponses
+            { plain = (\m -> liftIO $ runInIO m >>= respond),
+              html,
+              htmlWithQueryArgs = \parser act ->
+                case req & Parse.runParse "Unable to find the right request query arguments" (lmap Wai.queryString parser) of
+                  Right a -> html (act a)
+                  Left err ->
+                    html
+                      ( \span -> do
+                          recordException
+                            span
+                            ( T2
+                                (label @"type_" "Query Parse Exception")
+                                (label @"message" (prettyErrorTree err))
+                            )
+
+                          pure
+                            [hsx|
+                              <h1>Error:</h1>
+                              <pre>{err & prettyErrorTree}</pre>
+                            |]
+                      )
+            }
         )
   let handler =
         (handlers handlerResponses)
@@ -417,7 +481,11 @@ snipsRedactedSearch dat = do
       ]
   runTransaction $ do
     t
-    getBestTorrentsTable
+    getBestTorrentsTable (Nothing :: Maybe (Label "artistRedactedId" Natural))
+
+data ArtistFilter = ArtistFilter
+  { onlyArtist :: Maybe (Label "artistId" Text)
+  }
 
 getBestTorrentsTable ::
   ( MonadTransmission m,
@@ -426,9 +494,23 @@ getBestTorrentsTable ::
     MonadPostgres m,
     MonadOtel m
   ) =>
+  Maybe (Label "artistRedactedId" Natural) ->
   Transaction m Html
-getBestTorrentsTable = do
-  bestStale :: [TorrentData ()] <- getBestTorrents
+getBestTorrentsTable dat = do
+  fresh <- getBestTorrentsData dat
+  pure $ mkBestTorrentsTable fresh
+
+getBestTorrentsData ::
+  ( MonadTransmission m,
+    MonadThrow m,
+    MonadLogger m,
+    MonadPostgres m,
+    MonadOtel m
+  ) =>
+  Maybe (Label "artistRedactedId" Natural) ->
+  Transaction m [TorrentData (Label "percentDone" Percentage)]
+getBestTorrentsData artistFilter = do
+  bestStale :: [TorrentData ()] <- getBestTorrents GetBestTorrentsFilter {onlyArtist = artistFilter, onlyDownloaded = False}
   actual <-
     getAndUpdateTransmissionTorrentsStatus
       ( bestStale
@@ -440,20 +522,23 @@ getBestTorrentsTable = do
           <&> (\t -> (getLabel @"torrentHash" t, t.transmissionInfo))
           & Map.fromList
       )
-  let fresh =
-        bestStale
-          --  we have to update the status of every torrent that’s not in tranmission anymore
-          -- TODO I feel like it’s easier (& more correct?) to just do the database request again …
-          <&> ( \td -> case td.torrentStatus of
-                  InTransmission info ->
-                    case actual & Map.lookup (getLabel @"torrentHash" info) of
-                      -- TODO this is also pretty dumb, cause it assumes that we have the torrent file if it was in transmission before,
-                      -- which is an internal factum that is established in getBestTorrents (and might change later)
-                      Nothing -> td {torrentStatus = NotInTransmissionYet}
-                      Just transmissionInfo -> td {torrentStatus = InTransmission (T2 (getLabel @"torrentHash" info) (label @"transmissionInfo" transmissionInfo))}
-                  NotInTransmissionYet -> td {torrentStatus = NotInTransmissionYet}
-                  NoTorrentFileYet -> td {torrentStatus = NoTorrentFileYet}
-              )
+  pure $
+    bestStale
+      -- we have to update the status of every torrent that’s not in transmission anymore
+      -- TODO I feel like it’s easier (& more correct?) to just do the database request again …
+      <&> ( \td -> case td.torrentStatus of
+              InTransmission info ->
+                case actual & Map.lookup (getLabel @"torrentHash" info) of
+                  -- TODO this is also pretty dumb, because it assumes that we have the torrent file if it was in transmission before,
+                  -- which is an internal fact that is established in getBestTorrents (and might change later)
+                  Nothing -> td {torrentStatus = NotInTransmissionYet}
+                  Just transmissionInfo -> td {torrentStatus = InTransmission (T2 (getLabel @"torrentHash" info) (label @"transmissionInfo" transmissionInfo))}
+              NotInTransmissionYet -> td {torrentStatus = NotInTransmissionYet}
+              NoTorrentFileYet -> td {torrentStatus = NoTorrentFileYet}
+          )
+
+mkBestTorrentsTable :: [TorrentData (Label "percentDone" Percentage)] -> Html
+mkBestTorrentsTable fresh = do
   let localTorrent b = case b.torrentStatus of
         NoTorrentFileYet -> [hsx|<button hx-post="snips/redacted/getTorrentFile" hx-swap="outerHTML" hx-vals={Enc.encToBytesUtf8 $ Enc.object [("torrent-id", Enc.int b.torrentId)]}>Upload Torrent</button>|]
         InTransmission info -> [hsx|{info.transmissionInfo.percentDone.unPercentage}% done|]
@@ -462,19 +547,34 @@ getBestTorrentsTable = do
         fresh
           & foldMap
             ( \b -> do
+                let artists =
+                      b.artists
+                        <&> ( \a ->
+                                T2
+                                  (label @"url" [fmt|/artist?redacted_id={a.artistId}|])
+                                  (label @"content" $ Html.toHtml @Text a.artistName)
+                            )
+                        & mkLinkList
+
                 [hsx|
                   <tr>
                   <td>{localTorrent b}</td>
                   <td>{Html.toHtml @Int b.groupId}</td>
-                  <td>{Html.toHtml @Text b.torrentGroupJson.artist}</td>
-                  <td>{Html.toHtml @Text b.torrentGroupJson.groupName}</td>
+                  <td>
+                    {artists}
+                  </td>
+                  <td>
+                    <a href={mkRedactedTorrentLink (Arg b.groupId)} target="_blank">
+                      {Html.toHtml @Text b.torrentGroupJson.groupName}
+                    </a>
+                  </td>
+                  <td>{Html.toHtml @Int b.torrentGroupJson.groupYear}</td>
                   <td>{Html.toHtml @Int b.seedingWeight}</td>
                   <td><details hx-trigger="toggle once" hx-post="snips/redacted/torrentDataJson" hx-vals={Enc.encToBytesUtf8 $ Enc.object [("torrent-id", Enc.int b.torrentId)]}></details></td>
                   </tr>
                 |]
             )
-  pure $
-    [hsx|
+  [hsx|
         <table class="table">
           <thead>
             <tr>
@@ -482,6 +582,7 @@ getBestTorrentsTable = do
               <th>Group ID</th>
               <th>Artist</th>
               <th>Name</th>
+              <th>Year</th>
               <th>Weight</th>
               <th>Torrent</th>
               <th>Torrent Group</th>
@@ -493,6 +594,15 @@ getBestTorrentsTable = do
         </table>
       |]
 
+mkLinkList :: [T2 "url" Text "content" Html] -> Html
+mkLinkList xs =
+  xs
+    <&> ( \x -> do
+            [hsx|<a href={x.url}>{x.content}</a>|]
+        )
+    & List.intersperse ", "
+    & mconcat
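+
+-- For example (illustrative values):
+--
+-- > mkLinkList
+-- >   [ T2 (label @"url" "/artist?redacted_id=1") (label @"content" $ Html.toHtml @Text "First"),
+-- >     T2 (label @"url" "/artist?redacted_id=2") (label @"content" $ Html.toHtml @Text "Second")
+-- >   ]
+--
+-- renders as: <a href="/artist?redacted_id=1">First</a>, <a href="/artist?redacted_id=2">Second</a>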
+
 getTransmissionTorrentsTable ::
   (MonadTransmission m, MonadThrow m, MonadLogger m, MonadOtel m) => m Html
 getTransmissionTorrentsTable = do
@@ -556,35 +666,48 @@ migrate = inSpan "Database Migration" $ do
       UNIQUE(torrent_id)
     );
 
+    CREATE INDEX IF NOT EXISTS redacted_torrents_json_torrent_group_fk ON redacted.torrents_json (torrent_group);
+
+
     ALTER TABLE redacted.torrents_json
     ADD COLUMN IF NOT EXISTS torrent_file bytea NULL;
     ALTER TABLE redacted.torrents_json
     ADD COLUMN IF NOT EXISTS transmission_torrent_hash text NULL;
 
-    -- inflect out values of the full json
 
+    -- the seeding weight is used to find the best torrent in a group.
+    CREATE OR REPLACE FUNCTION calc_seeding_weight(full_json_result jsonb) RETURNS int AS $$
+    BEGIN
+      RETURN
+        ((full_json_result->'seeders')::integer*3
+        + (full_json_result->'snatches')::integer
+        )
+        -- prefer remasters by multiplying their weight by 3
+        * (CASE
+            WHEN full_json_result->>'remasterTitle' ILIKE '%remaster%'
+            THEN 3
+            ELSE 1
+          END);
+    END;
+    $$ LANGUAGE plpgsql IMMUTABLE;
+
+    ALTER TABLE redacted.torrents_json
+    ADD COLUMN IF NOT EXISTS seeding_weight int GENERATED ALWAYS AS (calc_seeding_weight(full_json_result)) STORED;
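+
+    -- e.g. (hypothetical numbers) 10 seeders, 5 snatches and a remasterTitle
+    -- containing 'remaster' score (10*3 + 5) * 3 = 105; the same torrent
+    -- without the remaster marker scores 35.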
+
+    -- inflect out values of the full json
     CREATE OR REPLACE VIEW redacted.torrents AS
     SELECT
       t.id,
       t.torrent_id,
       t.torrent_group,
       -- the seeding weight is used to find the best torrent in a group.
-      ( ((full_json_result->'seeders')::integer*3
-        + (full_json_result->'snatches')::integer
-        )
-      -- prefer remasters by multiplying them with 3
-      * (CASE
-          WHEN full_json_result->>'remasterTitle' ILIKE '%remaster%'
-          THEN 3
-          ELSE 1
-         END)
-      )
-      AS seeding_weight,
+      t.seeding_weight,
       t.full_json_result,
       t.torrent_file,
       t.transmission_torrent_hash
     FROM redacted.torrents_json t;
 
+
     CREATE INDEX IF NOT EXISTS torrents_json_seeding ON redacted.torrents_json(((full_json_result->'seeding')::integer));
     CREATE INDEX IF NOT EXISTS torrents_json_snatches ON redacted.torrents_json(((full_json_result->'snatches')::integer));
   |]
@@ -624,7 +747,8 @@ httpTorrent span req =
 
 runAppWith :: AppT IO a -> IO (Either TmpPg.StartError a)
 runAppWith appT = withTracer $ \tracer -> withDb $ \db -> do
-  pgFormat <- readTools (label @"toolsEnvVar" "WHATCD_RESOLVER_TOOLS") (readTool "pg_format")
+  tool <- readTools (label @"toolsEnvVar" "WHATCD_RESOLVER_TOOLS") (readTool "pg_format")
+  pgFormat <- initPgFormatPool (label @"pgFormat" tool)
   let config = label @"logDatabaseQueries" LogDatabaseQueries
   pgConnPool <-
     Pool.newPool $
@@ -633,7 +757,7 @@ runAppWith appT = withTracer $ \tracer -> withDb $ \db -> do
         {- resource destruction -} Postgres.close
         {- unusedResourceOpenTime -} 10
         {- max resources across all stripes -} 20
-  transmissionSessionId <- newEmptyMVar
+  transmissionSessionId <- newIORef Nothing
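+  -- the IORef replaces the MVar so an unset session id reads as Nothing instead
+  -- of blocking; a read-or-fetch sketch (getOrFetch is hypothetical, not in the code):
+  --
+  -- > getOrFetch :: IORef (Maybe a) -> IO a -> IO a
+  -- > getOrFetch ref fetch =
+  -- >   readIORef ref >>= \case
+  -- >     Just sid -> pure sid
+  -- >     Nothing -> do
+  -- >       sid <- fetch
+  -- >       writeIORef ref (Just sid)
+  -- >       pure sid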
   let newAppT = do
         logInfo [fmt|Running with config: {showPretty config}|]
         logInfo [fmt|Connected to database at {db & TmpPg.toDataDirectory} on socket {db & TmpPg.toConnectionString}|]
diff --git a/users/Profpatsch/whatcd-resolver/whatcd-resolver.cabal b/users/Profpatsch/whatcd-resolver/whatcd-resolver.cabal
index a9bd04827b..8b3258bb5f 100644
--- a/users/Profpatsch/whatcd-resolver/whatcd-resolver.cabal
+++ b/users/Profpatsch/whatcd-resolver/whatcd-resolver.cabal
@@ -119,3 +119,7 @@ executable whatcd-resolver
     build-depends:
         base >=4.15 && <5,
         whatcd-resolver
+
+    ghc-options:
+      -threaded
+
diff --git a/users/flokli/archeology/default.nix b/users/flokli/archeology/default.nix
index d642399cbe..690944403b 100644
--- a/users/flokli/archeology/default.nix
+++ b/users/flokli/archeology/default.nix
@@ -10,7 +10,7 @@ let
   '';
   # clickhouse has a very odd AWS config concept.
   # Configure it to be a bit more sane.
-  clickhoseLocalFixedAWS = pkgs.runCommand "clickhouse-local-fixed"
+  clickhouseLocalFixedAWS = pkgs.runCommand "clickhouse-local-fixed"
     {
       nativeBuildInputs = [ pkgs.makeWrapper ];
     } ''
@@ -21,19 +21,19 @@ let
 in
 
 depot.nix.readTree.drvTargets {
-  inherit clickhoseLocalFixedAWS;
+  inherit clickhouseLocalFixedAWS;
   parse-bucket-logs = pkgs.runCommand "archeology-parse-bucket-logs"
     {
       nativeBuildInputs = [ pkgs.makeWrapper ];
     } ''
     mkdir -p $out/bin
     makeWrapper ${(pkgs.writers.writeRust "parse-bucket-logs-unwrapped" {} ./parse_bucket_logs.rs)} $out/bin/archeology-parse-bucket-logs \
-      --prefix PATH : ${pkgs.lib.makeBinPath [ clickhoseLocalFixedAWS ]}
+      --prefix PATH : ${pkgs.lib.makeBinPath [ clickhouseLocalFixedAWS ]}
   '';
 
   shell = pkgs.mkShell {
     name = "archeology-shell";
-    packages = with pkgs; [ awscli2 clickhoseLocalFixedAWS rust-analyzer rustc rustfmt ];
+    packages = with pkgs; [ awscli2 clickhouseLocalFixedAWS rust-analyzer rustc rustfmt ];
 
     AWS_PROFILE = "sso";
     AWS_CONFIG_FILE = pkgs.writeText "aws-config" ''
diff --git a/users/flokli/ipu6-softisp/default.nix b/users/flokli/ipu6-softisp/default.nix
index 1f603dbb42..66a2f04a51 100644
--- a/users/flokli/ipu6-softisp/default.nix
+++ b/users/flokli/ipu6-softisp/default.nix
@@ -42,14 +42,14 @@ depot.nix.readTree.drvTargets rec {
   });
 
   # Make sure the firmware requested by the driver is present in our firmware.
-  # We do have a .xz suffix here, but that's fine, since request_firmware does
-  # check ${name}.xz too in case CONFIG_FW_LOADER_COMPRESS is set.
+  # We do have a .zst suffix here, but that's fine, since request_firmware does
+  # check ${name}.zst too in case CONFIG_FW_LOADER_COMPRESS is set.
   # The path needs to be kept in sync with the ones used in the kernel patch.
   checkFirmware = pkgs.runCommand "check-firmware" { } ''
-    stat ${testSystem}/firmware/intel/ipu/ipu6se_fw.bin.xz
-    stat ${testSystem}/firmware/intel/ipu/ipu6ep_fw.bin.xz
-    stat ${testSystem}/firmware/intel/ipu/ipu6_fw.bin.xz
-    stat ${testSystem}/firmware/intel/ipu/ipu6epmtl_fw.bin.xz
+    stat ${testSystem}/firmware/intel/ipu/ipu6se_fw.bin.zst
+    stat ${testSystem}/firmware/intel/ipu/ipu6ep_fw.bin.zst
+    stat ${testSystem}/firmware/intel/ipu/ipu6_fw.bin.zst
+    stat ${testSystem}/firmware/intel/ipu/ipu6epmtl_fw.bin.zst
 
     # all good, succeed build
     touch $out
diff --git a/users/tazjin/chase-geese/default.nix b/users/tazjin/chase-geese/default.nix
index 3549f75868..595ca92896 100644
--- a/users/tazjin/chase-geese/default.nix
+++ b/users/tazjin/chase-geese/default.nix
@@ -9,5 +9,5 @@ pkgs.writeShellScriptBin "chase-geese" ''
 
   echo "Mounting the cloud ..."
   mkdir -p ~/cloud
-  ${depot.third_party.geesefs}/bin/geesefs tazjins-files ~/cloud
+  ${pkgs.geesefs}/bin/geesefs tazjins-files ~/cloud
 ''
diff --git a/users/tazjin/emacs/config/settings.el b/users/tazjin/emacs/config/settings.el
index 6c66ca608d..afe181b70b 100644
--- a/users/tazjin/emacs/config/settings.el
+++ b/users/tazjin/emacs/config/settings.el
@@ -19,6 +19,9 @@
       ediff-split-window-function 'split-window-horizontally
       initial-major-mode 'emacs-lisp-mode)
 
+(setq-default tab-width 4)
+(setq-default fill-column 80)
+
 (add-to-list 'safe-local-variable-values '(lexical-binding . t))
 (add-to-list 'safe-local-variable-values '(whitespace-line-column . 80))
 
diff --git a/users/tazjin/emacs/default.nix b/users/tazjin/emacs/default.nix
index 46843432f1..17973b8b48 100644
--- a/users/tazjin/emacs/default.nix
+++ b/users/tazjin/emacs/default.nix
@@ -10,7 +10,7 @@ pkgs.makeOverridable
 
     # If switching telega versions, use this variable because it will
     # keep the version check, binary path and so on in sync.
-    currentTelega = epkgs: epkgs.melpaPackages.telega;
+    currentTelega = epkgs: epkgs.telega;
 
     # $PATH for binaries that need to be available to Emacs
     emacsBinPath = lib.makeBinPath [
diff --git a/users/tazjin/home/arbat.nix b/users/tazjin/home/arbat.nix
new file mode 100644
index 0000000000..83daf2012c
--- /dev/null
+++ b/users/tazjin/home/arbat.nix
@@ -0,0 +1,11 @@
+# Home manager configuration for arbat.
+
+{ depot, pkgs, ... }: # readTree
+{ config, lib, ... }: # home-manager
+
+{
+  imports = [
+    depot.users.tazjin.home.shared
+    depot.users.tazjin.home.persistence
+  ];
+}
diff --git a/users/tazjin/keys/default.nix b/users/tazjin/keys/default.nix
index 16b232b094..300cd49e89 100644
--- a/users/tazjin/keys/default.nix
+++ b/users/tazjin/keys/default.nix
@@ -9,4 +9,5 @@ in withAll {
   zamalek_ed25519 = "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIDBRXeb8EuecLHP0bW4zuebXp4KRnXgJTZfeVWXQ1n1R tazjin@zamalek";
   khamovnik_yk = "ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBPgOyR4rRM8IaVGgN2ZxGlKtd7GLYbxdRTRa3u9EhRNSkHAvRTN9sgw7mm0iPLnHChPy10anKV43vTaIm906Gm8=";
   khamovnik_agenix = "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIG4YSl5+DHQR3rOoBJLQfQ840U0CrYkByMKdzu/LDxoT tazjin@khamovnik";
+  arbat = "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIJ1Eai0p7eF7XML5wokqF4GlVZM+YXEORfs/GPGwEky7 tazjin@arbat";
 }
diff --git a/users/tazjin/nixos/arbat/default.nix b/users/tazjin/nixos/arbat/default.nix
new file mode 100644
index 0000000000..73bed4573a
--- /dev/null
+++ b/users/tazjin/nixos/arbat/default.nix
@@ -0,0 +1,74 @@
+# arbat is my Unchartevice 6640MA, with a Zhaoxin CPU.
+{ depot, lib, pkgs, ... }:
+
+config:
+let
+  mod = name: depot.path.origSrc + ("/ops/modules/" + name);
+  usermod = name: depot.path.origSrc + ("/users/tazjin/nixos/modules/" + name);
+
+  zdevice = device: {
+    inherit device;
+    fsType = "zfs";
+  };
+in
+{
+  imports = [
+    (usermod "chromium.nix")
+    (usermod "desktop.nix")
+    (usermod "fonts.nix")
+    (usermod "home-config.nix")
+    (usermod "laptop.nix")
+    (usermod "persistence.nix")
+    (usermod "physical.nix")
+    (pkgs.home-manager.src + "/nixos")
+  ];
+
+  tvl.cache.enable = true;
+
+  boot = {
+    loader.systemd-boot.enable = true;
+    supportedFilesystems = [ "zfs" ];
+    zfs.devNodes = "/dev/";
+    # TODO: double-check this list
+    initrd.availableKernelModules = [ "ahci" "uhci_hcd" "ehci_pci" "xhci_pci" "usb_storage" "sd_mod" "rtsx_usb_sdmmc" ];
+    kernelModules = [ "kvm-intel" ]; # interesting
+  };
+
+  networking = {
+    hostName = "arbat";
+    hostId = "864f050b";
+    networkmanager.enable = true;
+  };
+
+  fileSystems = {
+    "/" = zdevice "zpool/ephemeral/root";
+    "/home" = zdevice "zpool/ephemeral/home";
+    "/persist" = zdevice "zpool/persistent/data" // { neededForBoot = true; };
+    "/nix" = zdevice "zpool/persistent/nix";
+    "/depot" = zdevice "zpool/persistent/depot";
+
+    "/boot" = {
+      device = "/dev/disk/by-uuid/B3B5-92F7";
+      fsType = "vfat";
+    };
+  };
+
+  hardware = {
+    enableRedistributableFirmware = true;
+    opengl.enable = true;
+    bluetooth.enable = true;
+  };
+
+  # TODO(tazjin): decide on this
+  services.libinput = {
+    enable = true;
+    # libinput thinks the touchpad is a mouse
+    mouse.naturalScrolling = false;
+    mouse.disableWhileTyping = true;
+  };
+  # services.xserver.libinput.touchpad.clickMethod = "clickfinger";
+  # services.xserver.libinput.touchpad.tapping = false;
+
+  nixpkgs.hostPlatform = lib.mkDefault "x86_64-linux";
+  system.stateVersion = "24.11";
+}
diff --git a/users/tazjin/nixos/default.nix b/users/tazjin/nixos/default.nix
index 8f82c39ea1..29b6a0e83b 100644
--- a/users/tazjin/nixos/default.nix
+++ b/users/tazjin/nixos/default.nix
@@ -2,6 +2,7 @@
 
 let systemFor = sys: (depot.ops.nixos.nixosFor sys).system;
 in depot.nix.readTree.drvTargets {
+  arbatSystem = systemFor depot.users.tazjin.nixos.arbat;
   camdenSystem = systemFor depot.users.tazjin.nixos.camden;
   frogSystem = systemFor depot.users.tazjin.nixos.frog;
   tverskoySystem = systemFor depot.users.tazjin.nixos.tverskoy;
diff --git a/users/tazjin/nixos/modules/desktop.nix b/users/tazjin/nixos/modules/desktop.nix
index 12a42b8faa..296960a443 100644
--- a/users/tazjin/nixos/modules/desktop.nix
+++ b/users/tazjin/nixos/modules/desktop.nix
@@ -12,14 +12,13 @@
 
     redshift.enable = true;
     blueman.enable = true;
+    libinput.enable = true;
 
     xserver = {
       enable = true;
       xkb.layout = "us";
       xkb.options = "caps:super";
 
-      libinput.enable = true;
-
       displayManager = {
         # Give EXWM permission to control the session.
         sessionCommands = "${pkgs.xorg.xhost}/bin/xhost +SI:localuser:$USER";
@@ -31,6 +30,7 @@
         name = "exwm";
         start = "${config.tazjin.emacs}/bin/tazjins-emacs --internal-border=0 --border-width=0";
       };
+      desktopManager.xfce.enable = true;
     };
   };
 
diff --git a/users/tazjin/nixos/modules/geesefs.nix b/users/tazjin/nixos/modules/geesefs.nix
index c45ee528f6..60ee821e2f 100644
--- a/users/tazjin/nixos/modules/geesefs.nix
+++ b/users/tazjin/nixos/modules/geesefs.nix
@@ -28,7 +28,7 @@
 
       mkdir -p $STATE_DIRECTORY/tazjins-files $STATE_DIRECTORY/cache
 
-      ${depot.third_party.geesefs}/bin/geesefs \
+      ${pkgs.geesefs}/bin/geesefs \
         -f -o allow_other \
         --cache $STATE_DIRECTORY/cache \
         --shared-config $CREDENTIALS_DIRECTORY/geesefs-tazjins-files \
diff --git a/users/tazjin/nixos/modules/home-config.nix b/users/tazjin/nixos/modules/home-config.nix
index bda8f7a440..77fe3f69bc 100644
--- a/users/tazjin/nixos/modules/home-config.nix
+++ b/users/tazjin/nixos/modules/home-config.nix
@@ -14,6 +14,8 @@
 
   nix.settings.trusted-users = [ "tazjin" ];
 
+  home-manager.backupFileExtension = "backup";
   home-manager.useGlobalPkgs = true;
-  home-manager.users.tazjin = depot.users.tazjin.home."${config.networking.hostName}";
+  home-manager.users.tazjin = with depot.users.tazjin;
+    home."${config.networking.hostName}" or home.shared;
 }
diff --git a/users/tazjin/nixos/modules/physical.nix b/users/tazjin/nixos/modules/physical.nix
index d469da7e5a..5ec527fa7c 100644
--- a/users/tazjin/nixos/modules/physical.nix
+++ b/users/tazjin/nixos/modules/physical.nix
@@ -45,6 +45,9 @@ in
         gdb
         git
         gnupg
+        go
+        gopls
+        gotools
         gtk3 # for gtk-launch
         htop
         hyperfine
diff --git a/users/tazjin/nixos/zamalek/default.nix b/users/tazjin/nixos/zamalek/default.nix
index a340e8a3e8..29effaa9bd 100644
--- a/users/tazjin/nixos/zamalek/default.nix
+++ b/users/tazjin/nixos/zamalek/default.nix
@@ -61,10 +61,6 @@ in
     hostId = "ee399356";
     networkmanager.enable = true;
 
-    extraHosts = ''
-      10.101.240.1 wifi.silja.fi
-    '';
-
     nameservers = [
       "8.8.8.8"
       "8.8.4.4"
@@ -82,7 +78,6 @@ in
   services.xserver.libinput.touchpad.tapping = false;
   services.avahi.enable = true;
   services.tailscale.enable = true;
-  powerManagement.powertop.enable = true;
 
   system.stateVersion = "21.11";
 }
diff --git a/web/pwcrypt/Cargo.lock b/web/pwcrypt/Cargo.lock
index 7d33ab4bf1..2a88ec59d5 100644
--- a/web/pwcrypt/Cargo.lock
+++ b/web/pwcrypt/Cargo.lock
@@ -818,9 +818,9 @@ checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
 
 [[package]]
 name = "wasm-bindgen"
-version = "0.2.91"
+version = "0.2.92"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c1e124130aee3fb58c5bdd6b639a0509486b0338acaaae0c84a5124b0f588b7f"
+checksum = "4be2531df63900aeb2bca0daaaddec08491ee64ceecbee5076636a3b026795a8"
 dependencies = [
  "cfg-if",
  "wasm-bindgen-macro",
@@ -828,9 +828,9 @@ dependencies = [
 
 [[package]]
 name = "wasm-bindgen-backend"
-version = "0.2.91"
+version = "0.2.92"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c9e7e1900c352b609c8488ad12639a311045f40a35491fb69ba8c12f758af70b"
+checksum = "614d787b966d3989fa7bb98a654e369c762374fd3213d212cfc0251257e747da"
 dependencies = [
  "bumpalo",
  "log",
@@ -855,9 +855,9 @@ dependencies = [
 
 [[package]]
 name = "wasm-bindgen-macro"
-version = "0.2.91"
+version = "0.2.92"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b30af9e2d358182b5c7449424f017eba305ed32a7010509ede96cdc4696c46ed"
+checksum = "a1f8823de937b71b9460c0c34e25f3da88250760bec0ebac694b49997550d726"
 dependencies = [
  "quote",
  "wasm-bindgen-macro-support",
@@ -865,9 +865,9 @@ dependencies = [
 
 [[package]]
 name = "wasm-bindgen-macro-support"
-version = "0.2.91"
+version = "0.2.92"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "642f325be6301eb8107a83d12a8ac6c1e1c54345a7ef1a9261962dfefda09e66"
+checksum = "e94f17b526d0a461a191c78ea52bbce64071ed5c04c9ffe424dcb38f74171bb7"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -878,9 +878,9 @@ dependencies = [
 
 [[package]]
 name = "wasm-bindgen-shared"
-version = "0.2.91"
+version = "0.2.92"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4f186bd2dcf04330886ce82d6f33dd75a7bfcf69ecf5763b89fcde53b6ac9838"
+checksum = "af190c94f2773fdb3729c55b007a722abb5384da03bc0986df4c289bf5567e96"
 
 [[package]]
 name = "web-sys"
diff --git a/web/pwcrypt/Cargo.toml b/web/pwcrypt/Cargo.toml
index 6c0a6e5b6d..488dcad19e 100644
--- a/web/pwcrypt/Cargo.toml
+++ b/web/pwcrypt/Cargo.toml
@@ -8,6 +8,6 @@ argon2 = "0.5.0"
 getrandom = { version = "0.2.10", features = ["js"] }
 gloo = "0.8.0"
 rand_core = { version = "0.6.4", features = ["getrandom"] }
-wasm-bindgen = "= 0.2.91"
+wasm-bindgen = "= 0.2.92"
 web-sys = "0.3"
 yew = { version = "0.20.0", features = [ "csr" ]}
diff --git a/web/tvixbolt/Cargo.lock b/web/tvixbolt/Cargo.lock
index d3c5faf10c..d7a23d7854 100644
--- a/web/tvixbolt/Cargo.lock
+++ b/web/tvixbolt/Cargo.lock
@@ -964,7 +964,6 @@ dependencies = [
  "tabwriter",
  "toml",
  "tvix-eval-builtin-macros",
- "xml-rs",
 ]
 
 [[package]]
@@ -1023,9 +1022,9 @@ checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
 
 [[package]]
 name = "wasm-bindgen"
-version = "0.2.91"
+version = "0.2.92"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c1e124130aee3fb58c5bdd6b639a0509486b0338acaaae0c84a5124b0f588b7f"
+checksum = "4be2531df63900aeb2bca0daaaddec08491ee64ceecbee5076636a3b026795a8"
 dependencies = [
  "cfg-if",
  "wasm-bindgen-macro",
@@ -1033,9 +1032,9 @@ dependencies = [
 
 [[package]]
 name = "wasm-bindgen-backend"
-version = "0.2.91"
+version = "0.2.92"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c9e7e1900c352b609c8488ad12639a311045f40a35491fb69ba8c12f758af70b"
+checksum = "614d787b966d3989fa7bb98a654e369c762374fd3213d212cfc0251257e747da"
 dependencies = [
  "bumpalo",
  "log",
@@ -1060,9 +1059,9 @@ dependencies = [
 
 [[package]]
 name = "wasm-bindgen-macro"
-version = "0.2.91"
+version = "0.2.92"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b30af9e2d358182b5c7449424f017eba305ed32a7010509ede96cdc4696c46ed"
+checksum = "a1f8823de937b71b9460c0c34e25f3da88250760bec0ebac694b49997550d726"
 dependencies = [
  "quote",
  "wasm-bindgen-macro-support",
@@ -1070,9 +1069,9 @@ dependencies = [
 
 [[package]]
 name = "wasm-bindgen-macro-support"
-version = "0.2.91"
+version = "0.2.92"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "642f325be6301eb8107a83d12a8ac6c1e1c54345a7ef1a9261962dfefda09e66"
+checksum = "e94f17b526d0a461a191c78ea52bbce64071ed5c04c9ffe424dcb38f74171bb7"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -1083,9 +1082,9 @@ dependencies = [
 
 [[package]]
 name = "wasm-bindgen-shared"
-version = "0.2.91"
+version = "0.2.92"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4f186bd2dcf04330886ce82d6f33dd75a7bfcf69ecf5763b89fcde53b6ac9838"
+checksum = "af190c94f2773fdb3729c55b007a722abb5384da03bc0986df4c289bf5567e96"
 
 [[package]]
 name = "web-sys"
@@ -1129,12 +1128,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
 
 [[package]]
-name = "xml-rs"
-version = "0.8.19"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0fcb9cbac069e033553e8bb871be2fbdffcab578eb25bd0f7c508cedc6dcd75a"
-
-[[package]]
 name = "yew"
 version = "0.19.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
diff --git a/web/tvixbolt/Cargo.toml b/web/tvixbolt/Cargo.toml
index ce5ffb90e3..dafbe5ad60 100644
--- a/web/tvixbolt/Cargo.toml
+++ b/web/tvixbolt/Cargo.toml
@@ -11,7 +11,7 @@ serde_urlencoded = "*" # pinned by yew
 rnix = "0.11.0"
 
 # needs to be in sync with nixpkgs
-wasm-bindgen = "= 0.2.91"
+wasm-bindgen = "= 0.2.92"
 
 [dependencies.tvix-eval]
 path = "../../tvix/eval"