Diffstat (limited to 'tvix')
-rw-r--r--  tvix/Cargo.lock | 1634
-rw-r--r--  tvix/Cargo.nix | 5897
-rw-r--r--  tvix/Cargo.toml | 126
-rw-r--r--  tvix/README.md | 7
-rw-r--r--  tvix/boot/README.md | 39
-rw-r--r--  tvix/boot/default.nix | 15
-rw-r--r--  tvix/boot/tests/default.nix | 147
-rw-r--r--  tvix/build-go/build.pb.go | 18
-rw-r--r--  tvix/build-go/rpc_build.pb.go | 4
-rw-r--r--  tvix/build/Cargo.toml | 36
-rw-r--r--  tvix/build/build.rs | 12
-rw-r--r--  tvix/build/default.nix | 12
-rw-r--r--  tvix/build/protos/build.proto | 4
-rw-r--r--  tvix/build/protos/default.nix | 21
-rw-r--r--  tvix/build/src/bin/tvix-build.rs | 57
-rw-r--r--  tvix/build/src/proto/mod.rs | 29
-rw-r--r--  tvix/castore-go/castore.pb.go | 16
-rw-r--r--  tvix/castore-go/rpc_blobstore.pb.go | 16
-rw-r--r--  tvix/castore-go/rpc_directory.pb.go | 10
-rw-r--r--  tvix/castore/Cargo.toml | 144
-rw-r--r--  tvix/castore/build.rs | 12
-rw-r--r--  tvix/castore/default.nix | 35
-rw-r--r--  tvix/castore/protos/default.nix | 18
-rw-r--r--  tvix/castore/src/blobservice/combinator.rs | 96
-rw-r--r--  tvix/castore/src/blobservice/from_addr.rs | 55
-rw-r--r--  tvix/castore/src/blobservice/grpc.rs | 71
-rw-r--r--  tvix/castore/src/blobservice/memory.rs | 30
-rw-r--r--  tvix/castore/src/blobservice/mod.rs | 19
-rw-r--r--  tvix/castore/src/blobservice/naive_seeker.rs | 265
-rw-r--r--  tvix/castore/src/blobservice/object_store.rs | 165
-rw-r--r--  tvix/castore/src/blobservice/tests/utils.rs | 3
-rw-r--r--  tvix/castore/src/composition.rs | 541
-rw-r--r--  tvix/castore/src/digests.rs | 13
-rw-r--r--  tvix/castore/src/directoryservice/bigtable.rs | 131
-rw-r--r--  tvix/castore/src/directoryservice/closure_validator.rs | 309
-rw-r--r--  tvix/castore/src/directoryservice/combinators.rs | 180
-rw-r--r--  tvix/castore/src/directoryservice/directory_graph.rs | 414
-rw-r--r--  tvix/castore/src/directoryservice/from_addr.rs | 123
-rw-r--r--  tvix/castore/src/directoryservice/grpc.rs | 135
-rw-r--r--  tvix/castore/src/directoryservice/memory.rs | 62
-rw-r--r--  tvix/castore/src/directoryservice/mod.rs | 67
-rw-r--r--  tvix/castore/src/directoryservice/object_store.rs | 116
-rw-r--r--  tvix/castore/src/directoryservice/order_validator.rs | 188
-rw-r--r--  tvix/castore/src/directoryservice/redb.rs | 303
-rw-r--r--  tvix/castore/src/directoryservice/simple_putter.rs | 17
-rw-r--r--  tvix/castore/src/directoryservice/sled.rs | 132
-rw-r--r--  tvix/castore/src/directoryservice/tests/mod.rs | 115
-rw-r--r--  tvix/castore/src/directoryservice/tests/utils.rs | 3
-rw-r--r--  tvix/castore/src/directoryservice/traverse.rs | 56
-rw-r--r--  tvix/castore/src/directoryservice/utils.rs | 60
-rw-r--r--  tvix/castore/src/errors.rs | 84
-rw-r--r--  tvix/castore/src/fixtures.rs | 118
-rw-r--r--  tvix/castore/src/fs/fuse/mod.rs (renamed from tvix/castore/src/fs/fuse.rs) | 59
-rw-r--r--  tvix/castore/src/fs/fuse/tests.rs (renamed from tvix/castore/src/fs/tests.rs) | 164
-rw-r--r--  tvix/castore/src/fs/inodes.rs | 33
-rw-r--r--  tvix/castore/src/fs/mod.rs | 106
-rw-r--r--  tvix/castore/src/fs/root_nodes.rs | 22
-rw-r--r--  tvix/castore/src/import/archive.rs | 2
-rw-r--r--  tvix/castore/src/import/blobs.rs | 13
-rw-r--r--  tvix/castore/src/import/fs.rs | 57
-rw-r--r--  tvix/castore/src/import/mod.rs | 91
-rw-r--r--  tvix/castore/src/lib.rs | 8
-rw-r--r--  tvix/castore/src/nodes/directory.rs | 287
-rw-r--r--  tvix/castore/src/nodes/mod.rs | 48
-rw-r--r--  tvix/castore/src/nodes/symlink_target.rs | 223
-rw-r--r--  tvix/castore/src/path/component.rs | 268
-rw-r--r--  tvix/castore/src/path/mod.rs (renamed from tvix/castore/src/path.rs) | 48
-rw-r--r--  tvix/castore/src/proto/grpc_directoryservice_wrapper.rs | 32
-rw-r--r--  tvix/castore/src/proto/mod.rs | 561
-rw-r--r--  tvix/castore/src/proto/tests/directory.rs | 200
-rw-r--r--  tvix/castore/src/proto/tests/directory_nodes_iterator.rs | 78
-rw-r--r--  tvix/castore/src/proto/tests/mod.rs | 1
-rw-r--r--  tvix/castore/src/tests/import.rs | 41
-rw-r--r--  tvix/castore/src/tonic.rs | 6
-rw-r--r--  tvix/cli/Cargo.toml | 31
-rw-r--r--  tvix/cli/default.nix | 69
-rw-r--r--  tvix/cli/src/args.rs | 86
-rw-r--r--  tvix/cli/src/assignment.rs | 74
-rw-r--r--  tvix/cli/src/lib.rs | 270
-rw-r--r--  tvix/cli/src/main.rs | 321
-rw-r--r--  tvix/cli/src/repl.rs | 274
-rw-r--r--  tvix/cli/tests/.skip-tree (renamed from tvix/eval/src/tests/nix_tests/notyetpassing/readDir/bar) | 0
-rw-r--r--  tvix/cli/tests/import.nix | 1
-rw-r--r--  tvix/cli/tests/repl.rs | 98
-rw-r--r--  tvix/cli/tests/six.nix | 1
-rw-r--r--  tvix/clippy.toml | 4
-rw-r--r--  tvix/crate-hashes.json | 2
-rw-r--r--  tvix/default.nix | 145
-rw-r--r--  tvix/docs/book.toml | 15
-rw-r--r--  tvix/docs/default.nix | 3
-rw-r--r--  tvix/docs/mdbook-admonish.css | 348
-rw-r--r--  tvix/docs/mdbook-extra.css | 7
-rw-r--r--  tvix/docs/mdbook-extra.js | 24
-rw-r--r--  tvix/docs/src/SUMMARY.md | 43
-rw-r--r--  tvix/docs/src/TODO.md | 151
-rw-r--r--  tvix/docs/src/architecture.md | 9
-rw-r--r--  tvix/docs/src/build/index.md | 59
-rw-r--r--  tvix/docs/src/castore/blobstore-chunking.md (renamed from tvix/castore/docs/blobstore-chunking.md) | 6
-rw-r--r--  tvix/docs/src/castore/blobstore-protocol.md (renamed from tvix/castore/docs/blobstore-protocol.md) | 4
-rw-r--r--  tvix/docs/src/castore/data-model.md (renamed from tvix/castore/docs/data-model.md) | 8
-rw-r--r--  tvix/docs/src/castore/why-not-git-trees.md (renamed from tvix/castore/docs/why-not-git-trees.md) | 2
-rw-r--r--  tvix/docs/src/community.md | 23
-rw-r--r--  tvix/docs/src/contributing/code-&-commits.md | 76
-rw-r--r--  tvix/docs/src/contributing/email.md | 33
-rw-r--r--  tvix/docs/src/contributing/gerrit.md | 110
-rw-r--r--  tvix/docs/src/eval/abandoned/index.md | 3
-rw-r--r--  tvix/docs/src/eval/abandoned/thread-local-vm.md (renamed from tvix/eval/docs/abandoned/thread-local-vm.md) | 0
-rw-r--r--  tvix/docs/src/eval/bindings.md (renamed from tvix/eval/docs/bindings.md) | 9
-rw-r--r--  tvix/docs/src/eval/build-references.md (renamed from tvix/eval/docs/build-references.md) | 17
-rw-r--r--  tvix/docs/src/eval/builtins.md (renamed from tvix/eval/docs/builtins.md) | 3
-rw-r--r--  tvix/docs/src/eval/catchable-errors.md (renamed from tvix/eval/docs/catchable-errors.md) | 0
-rw-r--r--  tvix/docs/src/eval/known-optimisation-potential.md (renamed from tvix/eval/docs/known-optimisation-potential.md) | 3
-rw-r--r--  tvix/docs/src/eval/language-issues.md (renamed from tvix/eval/docs/language-issues.md) | 0
-rw-r--r--  tvix/docs/src/eval/opcodes-attrsets.md (renamed from tvix/eval/docs/opcodes-attrsets.md) | 0
-rw-r--r--  tvix/docs/src/eval/recursive-attrs.md (renamed from tvix/eval/docs/recursive-attrs.md) | 3
-rw-r--r--  tvix/docs/src/eval/vm-loop.md (renamed from tvix/eval/docs/vm-loop.md) | 3
-rw-r--r--  tvix/docs/src/getting-started.md | 59
-rw-r--r--  tvix/docs/src/introduction.md | 23
-rw-r--r--  tvix/docs/src/language-spec.md | 4
-rw-r--r--  tvix/docs/src/nix-daemon/changelog.md | 2
-rw-r--r--  tvix/docs/src/nix-daemon/handshake.md | 32
-rw-r--r--  tvix/docs/src/nix-daemon/index.md | 15
-rw-r--r--  tvix/docs/src/nix-daemon/logging.md | 42
-rw-r--r--  tvix/docs/src/nix-daemon/operations.md | 176
-rw-r--r--  tvix/docs/src/nix-daemon/serialization.md | 202
-rw-r--r--  tvix/docs/src/store/api.md (renamed from tvix/store/docs/api.md) | 9
-rw-r--r--  tvix/docs/src/value-pointer-equality.md | 6
-rw-r--r--  tvix/eval/Cargo.toml | 71
-rw-r--r--  tvix/eval/benches/eval.rs | 8
-rw-r--r--  tvix/eval/build.rs | 7
-rw-r--r--  tvix/eval/builtin-macros/Cargo.toml | 6
-rw-r--r--  tvix/eval/clippy.toml | 3
-rw-r--r--  tvix/eval/default.nix | 13
-rw-r--r--  tvix/eval/src/builtins/impure.rs | 20
-rw-r--r--  tvix/eval/src/builtins/mod.rs | 176
-rw-r--r--  tvix/eval/src/builtins/to_xml.rs | 275
-rw-r--r--  tvix/eval/src/chunk.rs | 285
-rw-r--r--  tvix/eval/src/compiler/bindings.rs | 73
-rw-r--r--  tvix/eval/src/compiler/import.rs | 1
-rw-r--r--  tvix/eval/src/compiler/mod.rs | 390
-rw-r--r--  tvix/eval/src/compiler/scope.rs | 40
-rw-r--r--  tvix/eval/src/errors.rs | 36
-rw-r--r--  tvix/eval/src/io.rs | 49
-rw-r--r--  tvix/eval/src/lib.rs | 423
-rw-r--r--  tvix/eval/src/nix_search_path.rs | 2
-rw-r--r--  tvix/eval/src/observer.rs | 14
-rw-r--r--  tvix/eval/src/opcode.rs | 277
-rw-r--r--  tvix/eval/src/spans.rs | 58
-rw-r--r--  tvix/eval/src/tests/mod.rs | 199
-rw-r--r--  tvix/eval/src/tests/nix_tests.rs | 210
-rw-r--r--  tvix/eval/src/tests/nix_tests/eval-okay-readDir.exp (renamed from tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-readDir.exp) | 0
-rw-r--r--  tvix/eval/src/tests/nix_tests/eval-okay-readDir.nix (renamed from tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-readDir.nix.disabled) | 0
-rw-r--r--  tvix/eval/src/tests/nix_tests/eval-okay-readFileType.exp (renamed from tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-readFileType.exp) | 0
-rw-r--r--  tvix/eval/src/tests/nix_tests/eval-okay-readFileType.nix (renamed from tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-readFileType.nix) | 0
-rw-r--r--  tvix/eval/src/tests/nix_tests/readDir/bar (renamed from tvix/eval/src/tests/tvix_tests/readDir/bar) | 0
-rw-r--r--  tvix/eval/src/tests/nix_tests/readDir/foo/git-hates-directories (renamed from tvix/eval/src/tests/nix_tests/notyetpassing/readDir/foo/git-hates-directories) | 0
l---------  tvix/eval/src/tests/nix_tests/readDir/ldir (renamed from tvix/eval/src/tests/nix_tests/notyetpassing/readDir/ldir) | 0
l---------  tvix/eval/src/tests/nix_tests/readDir/linked (renamed from tvix/eval/src/tests/nix_tests/notyetpassing/readDir/linked) | 0
-rw-r--r--  tvix/eval/src/tests/one_offs.rs | 9
-rw-r--r--  tvix/eval/src/tests/tvix_tests/eval-okay-builtins-split.exp | 1
-rw-r--r--  tvix/eval/src/tests/tvix_tests/eval-okay-builtins-split.nix | 10
-rw-r--r--  tvix/eval/src/tests/tvix_tests/eval-okay-path-exists-child-of-file.exp | 1
-rw-r--r--  tvix/eval/src/tests/tvix_tests/eval-okay-path-exists-child-of-file.nix | 1
-rw-r--r--  tvix/eval/src/tests/tvix_tests/eval-okay-readDir.exp | 1
-rw-r--r--  tvix/eval/src/tests/tvix_tests/eval-okay-readDir.nix.disabled | 1
-rw-r--r--  tvix/eval/src/tests/tvix_tests/eval-okay-toxml-empty.exp.xml | 5
-rw-r--r--  tvix/eval/src/tests/tvix_tests/eval-okay-toxml-empty.nix | 1
-rw-r--r--  tvix/eval/src/tests/tvix_tests/eval-okay-toxml.exp | 1
-rw-r--r--  tvix/eval/src/tests/tvix_tests/eval-okay-toxml.nix | 2
-rw-r--r--  tvix/eval/src/value/arbitrary.rs | 12
-rw-r--r--  tvix/eval/src/value/attrs.rs | 190
-rw-r--r--  tvix/eval/src/value/attrs/tests.rs | 8
-rw-r--r--  tvix/eval/src/value/json.rs | 8
-rw-r--r--  tvix/eval/src/value/list.rs | 23
-rw-r--r--  tvix/eval/src/value/mod.rs | 89
-rw-r--r--  tvix/eval/src/value/string/context.rs | 161
-rw-r--r--  tvix/eval/src/value/string/mod.rs (renamed from tvix/eval/src/value/string.rs) | 320
-rw-r--r--  tvix/eval/src/value/thunk.rs | 61
-rw-r--r--  tvix/eval/src/vm/generators.rs | 98
-rw-r--r--  tvix/eval/src/vm/macros.rs | 2
-rw-r--r--  tvix/eval/src/vm/mod.rs | 401
-rw-r--r--  tvix/eval/tests/nix_oracle.rs | 14
-rw-r--r--  tvix/glue/Cargo.toml | 63
-rw-r--r--  tvix/glue/benches/eval.rs | 42
-rw-r--r--  tvix/glue/build.rs | 6
-rw-r--r--  tvix/glue/default.nix | 13
-rw-r--r--  tvix/glue/src/builtins/derivation.rs | 119
-rw-r--r--  tvix/glue/src/builtins/errors.rs | 6
-rw-r--r--  tvix/glue/src/builtins/fetchers.rs | 60
-rw-r--r--  tvix/glue/src/builtins/import.rs | 221
-rw-r--r--  tvix/glue/src/builtins/mod.rs | 50
-rw-r--r--  tvix/glue/src/fetchers/decompression.rs | 6
-rw-r--r--  tvix/glue/src/fetchers/mod.rs | 472
-rw-r--r--  tvix/glue/src/fetchurl.rs | 82
-rw-r--r--  tvix/glue/src/known_paths.rs | 85
-rw-r--r--  tvix/glue/src/lib.rs | 18
-rw-r--r--  tvix/glue/src/refscan.rs | 268
-rw-r--r--  tvix/glue/src/tests/empty-file (renamed from tvix/eval/src/tests/tvix_tests/readDir/foo/.keep) | 0
-rw-r--r--  tvix/glue/src/tests/mod.rs | 37
-rw-r--r--  tvix/glue/src/tests/tvix_tests/eval-fail-fetchtarball-invalid-attrs.nix | 5
-rw-r--r--  tvix/glue/src/tests/tvix_tests/eval-fail-fetchtarball-invalid-url.nix | 1
-rw-r--r--  tvix/glue/src/tests/tvix_tests/eval-fail-fetchurl-invalid-attrs.nix | 5
-rw-r--r--  tvix/glue/src/tests/tvix_tests/eval-fail-fetchurl-invalid-url.nix | 1
-rw-r--r--  tvix/glue/src/tests/tvix_tests/eval-fail-tofile-wrongctxtype.nix | 3
-rw-r--r--  tvix/glue/src/tests/tvix_tests/eval-okay-context-introspection.nix | 2
-rw-r--r--  tvix/glue/src/tests/tvix_tests/eval-okay-fetchurl.exp | 2
-rw-r--r--  tvix/glue/src/tests/tvix_tests/eval-okay-fetchurl.nix | 40
-rw-r--r--  tvix/glue/src/tests/tvix_tests/eval-okay-storePath.exp | 1
-rw-r--r--  tvix/glue/src/tests/tvix_tests/eval-okay-storePath.nix | 9
-rw-r--r--  tvix/glue/src/tests/tvix_tests/eval-okay-tofile.exp | 1
-rw-r--r--  tvix/glue/src/tests/tvix_tests/eval-okay-tofile.nix | 11
-rw-r--r--  tvix/glue/src/tests/tvix_tests/eval-okay-toxml-context.exp | 1
-rw-r--r--  tvix/glue/src/tests/tvix_tests/eval-okay-toxml-context.nix | 14
-rw-r--r--  tvix/glue/src/tvix_build.rs | 47
-rw-r--r--  tvix/glue/src/tvix_io.rs | 10
-rw-r--r--  tvix/glue/src/tvix_store_io.rs | 270
-rw-r--r--  tvix/nar-bridge-go/.gitignore | 2
-rw-r--r--  tvix/nar-bridge-go/README.md | 7
-rw-r--r--  tvix/nar-bridge-go/cmd/nar-bridge-http/main.go | 93
-rw-r--r--  tvix/nar-bridge-go/cmd/nar-bridge-http/otel.go | 87
-rw-r--r--  tvix/nar-bridge-go/default.nix | 10
-rw-r--r--  tvix/nar-bridge-go/go.mod | 54
-rw-r--r--  tvix/nar-bridge-go/go.sum | 120
-rw-r--r--  tvix/nar-bridge-go/pkg/http/nar_get.go | 197
-rw-r--r--  tvix/nar-bridge-go/pkg/http/nar_put.go | 141
-rw-r--r--  tvix/nar-bridge-go/pkg/http/narinfo.go | 51
-rw-r--r--  tvix/nar-bridge-go/pkg/http/narinfo_get.go | 137
-rw-r--r--  tvix/nar-bridge-go/pkg/http/narinfo_put.go | 103
-rw-r--r--  tvix/nar-bridge-go/pkg/http/server.go | 119
-rw-r--r--  tvix/nar-bridge-go/pkg/http/util.go | 24
-rw-r--r--  tvix/nar-bridge-go/pkg/importer/blob_upload.go | 71
-rw-r--r--  tvix/nar-bridge-go/pkg/importer/counting_writer.go | 21
-rw-r--r--  tvix/nar-bridge-go/pkg/importer/directory_upload.go | 88
-rw-r--r--  tvix/nar-bridge-go/pkg/importer/gen_pathinfo.go | 62
-rw-r--r--  tvix/nar-bridge-go/pkg/importer/importer.go | 303
-rw-r--r--  tvix/nar-bridge-go/pkg/importer/importer_test.go | 537
-rw-r--r--  tvix/nar-bridge-go/pkg/importer/roundtrip_test.go | 85
-rw-r--r--  tvix/nar-bridge-go/pkg/importer/util_test.go | 34
-rw-r--r--  tvix/nar-bridge-go/testdata/emptydirectory.nar | bin 96 -> 0 bytes
-rw-r--r--  tvix/nar-bridge-go/testdata/nar_1094wph9z4nwlgvsd53abfz8i117ykiv5dwnq9nnhz846s7xqd7d.nar | bin 464152 -> 0 bytes
-rw-r--r--  tvix/nar-bridge-go/testdata/onebyteexecutable.nar | bin 152 -> 0 bytes
-rw-r--r--  tvix/nar-bridge-go/testdata/onebyteregular.nar | bin 120 -> 0 bytes
-rw-r--r--  tvix/nar-bridge-go/testdata/popdirectories.nar | bin 600 -> 0 bytes
-rw-r--r--  tvix/nar-bridge-go/testdata/symlink.nar | bin 136 -> 0 bytes
-rw-r--r--  tvix/nar-bridge/Cargo.toml | 46
-rw-r--r--  tvix/nar-bridge/default.nix | 11
-rw-r--r--  tvix/nar-bridge/src/bin/nar-bridge.rs | 92
-rw-r--r--  tvix/nar-bridge/src/lib.rs | 84
-rw-r--r--  tvix/nar-bridge/src/nar.rs | 143
-rw-r--r--  tvix/nar-bridge/src/narinfo.rs | 162
-rw-r--r--  tvix/nix-compat-derive-tests/Cargo.toml | 24
-rw-r--r--  tvix/nix-compat-derive-tests/default.nix | 5
-rw-r--r--  tvix/nix-compat-derive-tests/tests/read_derive.rs | 417
-rw-r--r--  tvix/nix-compat-derive-tests/tests/ui.rs | 6
-rw-r--r--  tvix/nix-compat-derive-tests/tests/ui/deserialize_bad_type.rs | 10
-rw-r--r--  tvix/nix-compat-derive-tests/tests/ui/deserialize_bad_type.stderr | 21
-rw-r--r--  tvix/nix-compat-derive-tests/tests/ui/deserialize_enum_non_exaustive.rs | 13
-rw-r--r--  tvix/nix-compat-derive-tests/tests/ui/deserialize_enum_non_exaustive.stderr | 8
-rw-r--r--  tvix/nix-compat-derive-tests/tests/ui/deserialize_from_missing.rs | 7
-rw-r--r--  tvix/nix-compat-derive-tests/tests/ui/deserialize_from_missing.stderr | 5
-rw-r--r--  tvix/nix-compat-derive-tests/tests/ui/deserialize_from_str_error_not_display.rs | 20
-rw-r--r--  tvix/nix-compat-derive-tests/tests/ui/deserialize_from_str_error_not_display.stderr | 13
-rw-r--r--  tvix/nix-compat-derive-tests/tests/ui/deserialize_from_str_missing.rs | 7
-rw-r--r--  tvix/nix-compat-derive-tests/tests/ui/deserialize_from_str_missing.stderr | 16
-rw-r--r--  tvix/nix-compat-derive-tests/tests/ui/deserialize_missing_default.rs | 12
-rw-r--r--  tvix/nix-compat-derive-tests/tests/ui/deserialize_missing_default.stderr | 12
-rw-r--r--  tvix/nix-compat-derive-tests/tests/ui/deserialize_missing_default_path.rs | 12
-rw-r--r--  tvix/nix-compat-derive-tests/tests/ui/deserialize_missing_default_path.stderr | 8
-rw-r--r--  tvix/nix-compat-derive-tests/tests/ui/deserialize_remote_missing_attr.rs | 15
-rw-r--r--  tvix/nix-compat-derive-tests/tests/ui/deserialize_remote_missing_attr.stderr | 5
-rw-r--r--  tvix/nix-compat-derive-tests/tests/ui/deserialize_try_from_error_not_display.rs | 19
-rw-r--r--  tvix/nix-compat-derive-tests/tests/ui/deserialize_try_from_error_not_display.stderr | 13
-rw-r--r--  tvix/nix-compat-derive-tests/tests/ui/deserialize_try_from_missing.rs | 7
-rw-r--r--  tvix/nix-compat-derive-tests/tests/ui/deserialize_try_from_missing.stderr | 8
-rw-r--r--  tvix/nix-compat-derive-tests/tests/ui/parse_bad_default.rs | 9
-rw-r--r--  tvix/nix-compat-derive-tests/tests/ui/parse_bad_default.stderr | 5
-rw-r--r--  tvix/nix-compat-derive-tests/tests/ui/parse_bad_default_path.rs | 9
-rw-r--r--  tvix/nix-compat-derive-tests/tests/ui/parse_bad_default_path.stderr | 5
-rw-r--r--  tvix/nix-compat-derive-tests/tests/ui/parse_bad_nix.rs | 9
-rw-r--r--  tvix/nix-compat-derive-tests/tests/ui/parse_bad_nix.stderr | 5
-rw-r--r--  tvix/nix-compat-derive-tests/tests/ui/parse_bad_version.rs | 9
-rw-r--r--  tvix/nix-compat-derive-tests/tests/ui/parse_bad_version.stderr | 5
-rw-r--r--  tvix/nix-compat-derive-tests/tests/ui/parse_mising_version.rs | 9
-rw-r--r--  tvix/nix-compat-derive-tests/tests/ui/parse_mising_version.stderr | 5
-rw-r--r--  tvix/nix-compat-derive/Cargo.toml | 24
-rw-r--r--  tvix/nix-compat-derive/default.nix | 5
-rw-r--r--  tvix/nix-compat-derive/src/de.rs | 272
-rw-r--r--  tvix/nix-compat-derive/src/internal/attrs.rs | 358
-rw-r--r--  tvix/nix-compat-derive/src/internal/ctx.rs | 50
-rw-r--r--  tvix/nix-compat-derive/src/internal/inputs.rs | 110
-rw-r--r--  tvix/nix-compat-derive/src/internal/mod.rs | 183
-rw-r--r--  tvix/nix-compat-derive/src/internal/symbol.rs | 32
-rw-r--r--  tvix/nix-compat-derive/src/lib.rs | 303
-rw-r--r--  tvix/nix-compat/Cargo.toml | 69
-rw-r--r--  tvix/nix-compat/benches/derivation_parse_aterm.rs | 4
-rw-r--r--  tvix/nix-compat/benches/narinfo_parse.rs | 4
-rw-r--r--  tvix/nix-compat/build.rs | 5
-rw-r--r--  tvix/nix-compat/default.nix | 14
-rw-r--r--  tvix/nix-compat/src/aterm/mod.rs | 4
-rw-r--r--  tvix/nix-compat/src/aterm/parser.rs | 28
-rw-r--r--  tvix/nix-compat/src/bin/drvfmt.rs | 5
-rw-r--r--  tvix/nix-compat/src/derivation/mod.rs | 12
-rw-r--r--  tvix/nix-compat/src/derivation/output.rs | 7
-rw-r--r--  tvix/nix-compat/src/derivation/parse_error.rs | 2
-rw-r--r--  tvix/nix-compat/src/derivation/parser.rs | 75
-rw-r--r--  tvix/nix-compat/src/derivation/write.rs | 23
-rw-r--r--  tvix/nix-compat/src/lib.rs | 6
-rw-r--r--  tvix/nix-compat/src/nar/listing/mod.rs | 128
-rw-r--r--  tvix/nix-compat/src/nar/listing/test.rs | 59
-rw-r--r--  tvix/nix-compat/src/nar/mod.rs | 1
-rw-r--r--  tvix/nix-compat/src/nar/reader/mod.rs | 4
-rw-r--r--  tvix/nix-compat/src/nar/tests/nixos-release.ls | 1
-rw-r--r--  tvix/nix-compat/src/nar/wire/mod.rs | 6
-rw-r--r--  tvix/nix-compat/src/nar/writer/sync.rs | 23
-rw-r--r--  tvix/nix-compat/src/narinfo/mod.rs | 77
-rw-r--r--  tvix/nix-compat/src/narinfo/signature.rs | 117
-rw-r--r--  tvix/nix-compat/src/narinfo/signing_keys.rs | 119
-rw-r--r--  tvix/nix-compat/src/narinfo/verifying_keys.rs (renamed from tvix/nix-compat/src/narinfo/public_keys.rs) | 39
-rw-r--r--  tvix/nix-compat/src/nix_daemon/de/bytes.rs | 70
-rw-r--r--  tvix/nix-compat/src/nix_daemon/de/collections.rs | 105
-rw-r--r--  tvix/nix-compat/src/nix_daemon/de/int.rs | 100
-rw-r--r--  tvix/nix-compat/src/nix_daemon/de/mock.rs | 261
-rw-r--r--  tvix/nix-compat/src/nix_daemon/de/mod.rs | 225
-rw-r--r--  tvix/nix-compat/src/nix_daemon/de/reader.rs | 527
-rw-r--r--  tvix/nix-compat/src/nix_daemon/mod.rs | 2
-rw-r--r--  tvix/nix-compat/src/nix_daemon/protocol_version.rs | 16
-rw-r--r--  tvix/nix-compat/src/nix_http/mod.rs | 115
-rw-r--r--  tvix/nix-compat/src/nixbase32.rs | 15
-rw-r--r--  tvix/nix-compat/src/nixcpp/conf.rs | 202
-rw-r--r--  tvix/nix-compat/src/nixcpp/mod.rs | 9
-rw-r--r--  tvix/nix-compat/src/nixhash/ca_hash.rs | 39
-rw-r--r--  tvix/nix-compat/src/store_path/mod.rs | 323
-rw-r--r--  tvix/nix-compat/src/store_path/utils.rs | 42
-rw-r--r--  tvix/nix-compat/src/wire/bytes/mod.rs | 22
-rw-r--r--  tvix/nix-compat/src/wire/bytes/reader/mod.rs | 6
-rw-r--r--  tvix/nix-compat/testdata/nix.conf | 20
-rw-r--r--  tvix/nix-compat/testdata/other_nix.conf | 18
-rw-r--r--  tvix/serde/Cargo.toml | 4
-rw-r--r--  tvix/serde/examples/nixpkgs.rs | 4
-rw-r--r--  tvix/serde/src/de.rs | 14
-rw-r--r--  tvix/serde/src/de_tests.rs | 9
-rw-r--r--  tvix/shell.nix | 21
-rw-r--r--  tvix/store-go/pathinfo.pb.go | 14
-rw-r--r--  tvix/store-go/rpc_pathinfo.pb.go | 12
-rw-r--r--  tvix/store/Cargo.toml | 108
-rw-r--r--  tvix/store/build.rs | 12
-rw-r--r--  tvix/store/default.nix | 48
-rw-r--r--  tvix/store/protos/default.nix | 21
-rw-r--r--  tvix/store/src/bin/tvix-store.rs | 378
-rw-r--r--  tvix/store/src/composition.rs | 22
-rw-r--r--  tvix/store/src/import.rs | 60
-rw-r--r--  tvix/store/src/lib.rs | 1
-rw-r--r--  tvix/store/src/nar/import.rs | 75
-rw-r--r--  tvix/store/src/nar/mod.rs | 17
-rw-r--r--  tvix/store/src/nar/renderer.rs | 84
-rw-r--r--  tvix/store/src/pathinfoservice/bigtable.rs | 144
-rw-r--r--  tvix/store/src/pathinfoservice/combinators.rs | 38
-rw-r--r--  tvix/store/src/pathinfoservice/from_addr.rs | 163
-rw-r--r--  tvix/store/src/pathinfoservice/fs/mod.rs | 35
-rw-r--r--  tvix/store/src/pathinfoservice/grpc.rs | 86
-rw-r--r--  tvix/store/src/pathinfoservice/lru.rs | 42
-rw-r--r--  tvix/store/src/pathinfoservice/memory.rs | 28
-rw-r--r--  tvix/store/src/pathinfoservice/mod.rs | 43
-rw-r--r--  tvix/store/src/pathinfoservice/nix_http.rs | 120
-rw-r--r--  tvix/store/src/pathinfoservice/redb.rs | 218
-rw-r--r--  tvix/store/src/pathinfoservice/sled.rs | 77
-rw-r--r--  tvix/store/src/pathinfoservice/tests/mod.rs | 12
-rw-r--r--  tvix/store/src/pathinfoservice/tests/utils.rs | 10
-rw-r--r--  tvix/store/src/proto/grpc_pathinfoservice_wrapper.rs | 29
-rw-r--r--  tvix/store/src/proto/mod.rs | 77
-rw-r--r--  tvix/store/src/proto/tests/pathinfo.rs | 54
-rw-r--r--  tvix/store/src/tests/nar_renderer.rs | 51
-rw-r--r--  tvix/store/src/utils.rs | 234
-rw-r--r--  tvix/tools/crunch-v2/Cargo.lock | 73
-rw-r--r--  tvix/tools/crunch-v2/Cargo.nix | 511
-rw-r--r--  tvix/tools/crunch-v2/Cargo.toml | 2
-rw-r--r--  tvix/tools/crunch-v2/default.nix | 22
-rw-r--r--  tvix/tools/narinfo2parquet/Cargo.lock | 72
-rw-r--r--  tvix/tools/narinfo2parquet/Cargo.nix | 414
-rw-r--r--  tvix/tools/narinfo2parquet/default.nix | 12
-rw-r--r--  tvix/tools/narinfo2parquet/src/main.rs | 46
-rw-r--r--  tvix/tools/turbofetch/Cargo.lock | 32
-rw-r--r--  tvix/tools/turbofetch/Cargo.nix | 240
-rw-r--r--  tvix/tools/turbofetch/Cargo.toml | 2
-rw-r--r--  tvix/tools/turbofetch/default.nix | 13
-rw-r--r--  tvix/tools/weave/Cargo.lock | 91
-rw-r--r--  tvix/tools/weave/Cargo.nix | 467
-rw-r--r--  tvix/tools/weave/default.nix | 12
-rw-r--r--  tvix/tracing/Cargo.toml | 54
-rw-r--r--  tvix/tracing/default.nix | 11
-rw-r--r--  tvix/tracing/src/lib.rs | 333
-rw-r--r--  tvix/tracing/src/propagate/axum.rs | 48
-rw-r--r--  tvix/tracing/src/propagate/mod.rs | 8
-rw-r--r--  tvix/tracing/src/propagate/reqwest.rs | 13
-rw-r--r--  tvix/tracing/src/propagate/tonic.rs | 57
-rw-r--r--  tvix/utils.nix | 160
-rw-r--r--  tvix/verify-lang-tests/default.nix | 69
397 files changed, 25389 insertions(+), 11124 deletions(-)
diff --git a/tvix/Cargo.lock b/tvix/Cargo.lock
index dc5298c45b5b..7f92e885c3d1 100644
--- a/tvix/Cargo.lock
+++ b/tvix/Cargo.lock
@@ -67,47 +67,48 @@ checksum = "4b46cbb362ab8752921c97e041f5e366ee6297bd428a31275b9fcf1e380f7299"
 
 [[package]]
 name = "anstream"
-version = "0.6.11"
+version = "0.6.15"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6e2e1ebcb11de5c03c67de28a7df593d32191b44939c482e97702baaaa6ab6a5"
+checksum = "64e15c1ab1f89faffbf04a634d5e1962e9074f2741eef6d97f3c4e322426d526"
 dependencies = [
  "anstyle",
  "anstyle-parse",
  "anstyle-query",
  "anstyle-wincon",
  "colorchoice",
+ "is_terminal_polyfill",
  "utf8parse",
 ]
 
 [[package]]
 name = "anstyle"
-version = "1.0.4"
+version = "1.0.8"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7079075b41f533b8c61d2a4d073c4676e1f8b249ff94a393b0595db304e0dd87"
+checksum = "1bec1de6f59aedf83baf9ff929c98f2ad654b97c9510f4e70cf6f661d49fd5b1"
 
 [[package]]
 name = "anstyle-parse"
-version = "0.2.3"
+version = "0.2.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c75ac65da39e5fe5ab759307499ddad880d724eed2f6ce5b5e8a26f4f387928c"
+checksum = "eb47de1e80c2b463c735db5b217a0ddc39d612e7ac9e2e96a5aed1f57616c1cb"
 dependencies = [
  "utf8parse",
 ]
 
 [[package]]
 name = "anstyle-query"
-version = "1.0.2"
+version = "1.1.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e28923312444cdd728e4738b3f9c9cac739500909bb3d3c94b43551b16517648"
+checksum = "6d36fc52c7f6c869915e99412912f22093507da8d9e942ceaf66fe4b7c14422a"
 dependencies = [
  "windows-sys 0.52.0",
 ]
 
 [[package]]
 name = "anstyle-wincon"
-version = "3.0.2"
+version = "3.0.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1cd54b81ec8d6180e24654d0b371ad22fc3dd083b6ff8ba325b72e00c87660a7"
+checksum = "5bf74e1b6e971609db8ca7a9ce79fd5768ab6ae46441c572e46cf596f59e57f8"
 dependencies = [
  "anstyle",
  "windows-sys 0.52.0",
@@ -115,9 +116,9 @@ dependencies = [
 
 [[package]]
 name = "anyhow"
-version = "1.0.79"
+version = "1.0.86"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "080e9890a082662b09c1ad45f567faeeb47f22b5fb23895fbe1e651e718e25ca"
+checksum = "b3d1d046238990b9cf5bcde22a3fb3584ee5cf65fb2765f454ed428c7a0063da"
 
 [[package]]
 name = "arc-swap"
@@ -152,9 +153,9 @@ dependencies = [
 
 [[package]]
 name = "async-compression"
-version = "0.4.9"
+version = "0.4.12"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4e9eabd7a98fe442131a17c316bd9349c43695e49e730c3c8e12cfb5f4da2693"
+checksum = "fec134f64e2bc57411226dfc4e52dec859ddfc7e711fc5e07b612584f000e4aa"
 dependencies = [
  "bzip2",
  "flate2",
@@ -208,20 +209,22 @@ dependencies = [
 
 [[package]]
 name = "async-process"
-version = "2.1.0"
+version = "2.2.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "451e3cf68011bd56771c79db04a9e333095ab6349f7e47592b788e9b98720cc8"
+checksum = "a8a07789659a4d385b79b18b9127fc27e1a59e1e89117c78c5ea3b806f016374"
 dependencies = [
  "async-channel",
  "async-io",
  "async-lock 3.3.0",
  "async-signal",
+ "async-task",
  "blocking",
  "cfg-if",
  "event-listener 5.2.0",
  "futures-lite",
  "rustix",
- "windows-sys 0.52.0",
+ "tracing",
+ "windows-sys 0.59.0",
 ]
 
 [[package]]
@@ -261,7 +264,7 @@ checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.48",
+ "syn 2.0.76",
 ]
 
 [[package]]
@@ -288,7 +291,7 @@ checksum = "c980ee35e870bd1a4d2c8294d4c04d0499e67bca1e4b5cefcc693c2fa00caea9"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.48",
+ "syn 2.0.76",
 ]
 
 [[package]]
@@ -305,47 +308,57 @@ checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
 
 [[package]]
 name = "axum"
-version = "0.6.20"
+version = "0.7.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3b829e4e32b91e643de6eafe82b1d90675f5874230191a4ffbc1b336dec4d6bf"
+checksum = "3a6c9af12842a67734c9a2e355436e5d03b22383ed60cf13cd0c18fbfe3dcbcf"
 dependencies = [
  "async-trait",
  "axum-core",
- "bitflags 1.3.2",
  "bytes",
  "futures-util",
  "http",
  "http-body",
+ "http-body-util",
  "hyper",
+ "hyper-util",
  "itoa",
- "matchit",
+ "matchit 0.7.3",
  "memchr",
  "mime",
  "percent-encoding",
  "pin-project-lite",
  "rustversion",
  "serde",
- "sync_wrapper",
+ "serde_json",
+ "serde_path_to_error",
+ "serde_urlencoded",
+ "sync_wrapper 1.0.1",
+ "tokio",
  "tower",
  "tower-layer",
  "tower-service",
+ "tracing",
 ]
 
 [[package]]
 name = "axum-core"
-version = "0.3.4"
+version = "0.4.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "759fa577a247914fd3f7f76d62972792636412fbfd634cd452f6a385a74d2d2c"
+checksum = "a15c63fd72d41492dc4f497196f5da1fb04fb7529e631d73630d1b491e47a2e3"
 dependencies = [
  "async-trait",
  "bytes",
  "futures-util",
  "http",
  "http-body",
+ "http-body-util",
  "mime",
+ "pin-project-lite",
  "rustversion",
+ "sync_wrapper 0.1.2",
  "tower-layer",
  "tower-service",
+ "tracing",
 ]
 
 [[package]]
@@ -370,6 +383,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567"
 
 [[package]]
+name = "base64"
+version = "0.22.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6"
+
+[[package]]
 name = "base64ct"
 version = "1.6.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -377,17 +396,17 @@ checksum = "8c3c1a368f70d6cf7302d78f8f7093da241fb8e8807c05cc9e51a125895a6d5b"
 
 [[package]]
 name = "bigtable_rs"
-version = "0.2.9"
-source = "git+https://github.com/flokli/bigtable_rs?rev=0af404741dfc40eb9fa99cf4d4140a09c5c20df7#0af404741dfc40eb9fa99cf4d4140a09c5c20df7"
+version = "0.2.10"
+source = "git+https://github.com/liufuyang/bigtable_rs?rev=1818355a5373a5bc2c84287e3a4e3807154ac8ef#1818355a5373a5bc2c84287e3a4e3807154ac8ef"
 dependencies = [
  "gcp_auth",
  "http",
+ "hyper-util",
  "log",
  "prost",
  "prost-build",
  "prost-types",
  "prost-wkt",
- "prost-wkt-build",
  "prost-wkt-types",
  "serde",
  "serde_with",
@@ -399,21 +418,6 @@ dependencies = [
 ]
 
 [[package]]
-name = "bit-set"
-version = "0.5.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0700ddab506f33b20a03b13996eccd309a48e5ff77d0d95926aa0210fb4e95f1"
-dependencies = [
- "bit-vec",
-]
-
-[[package]]
-name = "bit-vec"
-version = "0.6.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "349f9b6a179ed607305526ca489b34ad0a41aed5f7980fa90eb03160b69598fb"
-
-[[package]]
 name = "bitflags"
 version = "1.3.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -421,21 +425,15 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
 
 [[package]]
 name = "bitflags"
-version = "2.4.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ed570934406eb16438a4e976b1b4500774099c13b8cb96eec99f620f05090ddf"
-
-[[package]]
-name = "bitmaps"
-version = "3.2.0"
+version = "2.6.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "703642b98a00b3b90513279a8ede3fcfa479c126c5fb46e78f3051522f021403"
+checksum = "b048fb63fd8b5923fc5aa7b340d8e156aec7ec02f0c78fa8a6ddc2613f6f71de"
 
 [[package]]
 name = "blake3"
-version = "1.5.0"
+version = "1.5.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0231f06152bf547e9c2b5194f247cd97aacf6dcd8b15d8e5ec0663f64580da87"
+checksum = "d82033247fd8e890df8f740e407ad4d038debb9eb1f40533fffb32e7d17dc6f7"
 dependencies = [
  "arrayref",
  "arrayvec",
@@ -443,7 +441,7 @@ dependencies = [
  "cfg-if",
  "constant_time_eq",
  "digest",
- "rayon",
+ "rayon-core",
 ]
 
 [[package]]
@@ -473,12 +471,12 @@ dependencies = [
 
 [[package]]
 name = "bstr"
-version = "1.9.0"
+version = "1.10.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c48f0051a4b4c5e0b6d365cd04af53aeaa209e3cc15ec2cdb69e73cc87fbd0dc"
+checksum = "40723b8fb387abc38f4f4a37c09073622e41dd12327033091ef8950659e6dc0c"
 dependencies = [
  "memchr",
- "regex-automata 0.4.3",
+ "regex-automata 0.4.7",
  "serde",
 ]
 
@@ -496,9 +494,9 @@ checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b"
 
 [[package]]
 name = "bytes"
-version = "1.5.0"
+version = "1.7.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a2bd12c1caf447e69cd4528f47f94d203fd2582878ecb9e9465484c4148a8223"
+checksum = "8318a53db07bb3f8dca91a600466bdb3f2eaadeedfdbcf02e1accbad9271ba50"
 
 [[package]]
 name = "bzip2"
@@ -539,12 +537,13 @@ checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5"
 
 [[package]]
 name = "cc"
-version = "1.0.83"
+version = "1.1.15"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f1174fb0b6ec23863f8b971027804a42614e347eafb0a95bf0b12cdae21fc4d0"
+checksum = "57b6a275aa2903740dc87da01c62040406b8812552e97129a63ea8850a17c6e6"
 dependencies = [
  "jobserver",
  "libc",
+ "shlex",
 ]
 
 [[package]]
@@ -565,7 +564,7 @@ dependencies = [
  "num-traits",
  "serde",
  "wasm-bindgen",
- "windows-targets 0.52.0",
+ "windows-targets 0.52.6",
 ]
 
 [[package]]
@@ -597,9 +596,9 @@ dependencies = [
 
 [[package]]
 name = "clap"
-version = "4.4.18"
+version = "4.5.16"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1e578d6ec4194633722ccf9544794b71b1385c3c027efe0c55db226fc880865c"
+checksum = "ed6719fffa43d0d87e5fd8caeab59be1554fb028cd30edc88fc4369b17971019"
 dependencies = [
  "clap_builder",
  "clap_derive",
@@ -607,33 +606,33 @@ dependencies = [
 
 [[package]]
 name = "clap_builder"
-version = "4.4.18"
+version = "4.5.15"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4df4df40ec50c46000231c914968278b1eb05098cf8f1b3a518a95030e71d1c7"
+checksum = "216aec2b177652e3846684cbfe25c9964d18ec45234f0f5da5157b207ed1aab6"
 dependencies = [
  "anstream",
  "anstyle",
  "clap_lex",
- "strsim",
+ "strsim 0.11.1",
 ]
 
 [[package]]
 name = "clap_derive"
-version = "4.4.7"
+version = "4.5.13"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cf9804afaaf59a91e75b022a30fb7229a7901f60c755489cc61c9b423b836442"
+checksum = "501d359d5f3dcaf6ecdeee48833ae73ec6e42723a1e52419c79abf9507eec0a0"
 dependencies = [
- "heck",
+ "heck 0.5.0",
  "proc-macro2",
  "quote",
- "syn 2.0.48",
+ "syn 2.0.76",
 ]
 
 [[package]]
 name = "clap_lex"
-version = "0.6.0"
+version = "0.7.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "702fc72eb24e5a1e48ce58027a675bc24edd52096d5397d4aea7c6dd9eca0bd1"
+checksum = "1462739cb27611015575c0c11df5df7601141071f07518d56fcc1be504cbec97"
 
 [[package]]
 name = "clipboard-win"
@@ -664,9 +663,9 @@ dependencies = [
 
 [[package]]
 name = "colorchoice"
-version = "1.0.0"
+version = "1.0.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "acbf1af155f9b9ef647e42cdc158db4b64a1b61f743629225fde6f3e0be2a7c7"
+checksum = "d3fd119d74b830634cea2a0f58bbd0d54540518a14397557951e79340abc28c0"
 
 [[package]]
 name = "concurrent-queue"
@@ -678,6 +677,19 @@ dependencies = [
 ]
 
 [[package]]
+name = "console"
+version = "0.15.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0e1f83fc076bd6dd27517eacdf25fef6c4dfe5f1d7448bafaaf3a26f13b5e4eb"
+dependencies = [
+ "encode_unicode",
+ "lazy_static",
+ "libc",
+ "unicode-width",
+ "windows-sys 0.52.0",
+]
+
+[[package]]
 name = "const-oid"
 version = "0.9.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -840,7 +852,7 @@ checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.48",
+ "syn 2.0.76",
 ]
 
 [[package]]
@@ -863,8 +875,8 @@ dependencies = [
  "ident_case",
  "proc-macro2",
  "quote",
- "strsim",
- "syn 2.0.48",
+ "strsim 0.10.0",
+ "syn 2.0.76",
 ]
 
 [[package]]
@@ -875,14 +887,14 @@ checksum = "a668eda54683121533a393014d8692171709ff57a7d61f187b6e782719f8933f"
 dependencies = [
  "darling_core",
  "quote",
- "syn 2.0.48",
+ "syn 2.0.76",
 ]
 
 [[package]]
 name = "data-encoding"
-version = "2.5.0"
+version = "2.6.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7e962a19be5cfc3f3bf6dd8f61eb50107f356ad6270fbb3ed41476571db78be5"
+checksum = "e8566979429cf69b49a5c740c60791108e86440e8be149bbea4fe54d2c32d6e2"
 
 [[package]]
 name = "der"
@@ -963,6 +975,12 @@ dependencies = [
 ]
 
 [[package]]
+name = "dissimilar"
+version = "1.0.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "59f8e79d1fbf76bdfbde321e902714bf6c49df88a7dda6fc682fc2979226962d"
+
+[[package]]
 name = "doc-comment"
 version = "0.3.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -989,9 +1007,9 @@ dependencies = [
 
 [[package]]
 name = "ed25519-dalek"
-version = "2.1.0"
+version = "2.1.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1f628eaec48bfd21b865dc2950cfa014450c01d2fa2b69a86c2fd5844ec523c0"
+checksum = "4a3daa8e81a3963a60642bcc1f90a670680bd4a77535faa384e9d1c79d620871"
 dependencies = [
  "curve25519-dalek",
  "ed25519",
@@ -1008,13 +1026,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "a26ae43d7bcc3b814de94796a5e736d4029efb0ee900c12e2d54c993ad1a1e07"
 
 [[package]]
-name = "encoding_rs"
-version = "0.8.33"
+name = "encode_unicode"
+version = "0.3.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7268b386296a025e474d5140678f75d6de9493ae55a5d709eeb9dd08149945e1"
-dependencies = [
- "cfg-if",
-]
+checksum = "a357d28ed41a50f9c765dbfe56cbc04a64e53e5fc58ba79fbc34c10ef3df831f"
 
 [[package]]
 name = "endian-type"
@@ -1030,7 +1045,7 @@ checksum = "ba7795da175654fe16979af73f81f26a8ea27638d8d9823d317016888a63dc4c"
 dependencies = [
  "num-traits",
  "quote",
- "syn 2.0.48",
+ "syn 2.0.76",
 ]
 
 [[package]]
@@ -1041,11 +1056,12 @@ checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5"
 
 [[package]]
 name = "erased-serde"
-version = "0.4.4"
+version = "0.4.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2b73807008a3c7f171cc40312f37d95ef0396e048b5848d775f54b1a4dd4a0d3"
+checksum = "24e2389d65ab4fab27dc2a5de7b191e1f6617d1f1c8855c0dc569c94a4cbb18d"
 dependencies = [
  "serde",
+ "typeid",
 ]
 
 [[package]]
@@ -1117,6 +1133,16 @@ dependencies = [
 ]
 
 [[package]]
+name = "expect-test"
+version = "1.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9e0be0a561335815e06dab7c62e50353134c796e7a6155402a64bcff66b6a5e0"
+dependencies = [
+ "dissimilar",
+ "once_cell",
+]
+
+[[package]]
 name = "fastcdc"
 version = "3.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -1216,7 +1242,7 @@ dependencies = [
  "lazy_static",
  "libc",
  "log",
- "mio",
+ "mio 0.8.11",
  "nix 0.24.3",
  "vhost",
  "virtio-queue",
@@ -1293,7 +1319,7 @@ checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.48",
+ "syn 2.0.76",
 ]
 
 [[package]]
@@ -1343,19 +1369,22 @@ dependencies = [
 
 [[package]]
 name = "gcp_auth"
-version = "0.10.0"
+version = "0.12.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "de2c71ea685b88a1aa50e9fb66fe0e1cb29d755f58cca41fb8c91ef604d4f4d4"
+checksum = "536c79e79dde296a800738474691e97031769bed9b54e6dd0401b169d35d693d"
 dependencies = [
  "async-trait",
- "base64",
+ "base64 0.22.1",
+ "bytes",
  "chrono",
  "home",
+ "http",
+ "http-body-util",
  "hyper",
  "hyper-rustls",
+ "hyper-util",
  "ring",
- "rustls 0.21.12",
- "rustls-pemfile 1.0.4",
+ "rustls-pemfile",
  "serde",
  "serde_json",
  "thiserror",
@@ -1363,7 +1392,6 @@ dependencies = [
  "tracing",
  "tracing-futures",
  "url",
- "which 5.0.0",
 ]
 
 [[package]]
@@ -1382,6 +1410,20 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "0b32dfe1fdfc0bbde1f22a5da25355514b5e450c33a6af6770884c8750aedfbc"
 
 [[package]]
+name = "generator"
+version = "0.8.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "186014d53bc231d0090ef8d6f03e0920c54d85a5ed22f4f2f74315ec56cf83fb"
+dependencies = [
+ "cc",
+ "cfg-if",
+ "libc",
+ "log",
+ "rustversion",
+ "windows",
+]
+
+[[package]]
 name = "generic-array"
 version = "0.14.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -1398,8 +1440,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "190092ea657667030ac6a35e305e62fc4dd69fd98ac98631e5d3a2b1575a12b5"
 dependencies = [
  "cfg-if",
+ "js-sys",
  "libc",
  "wasi",
+ "wasm-bindgen",
 ]
 
 [[package]]
@@ -1416,9 +1460,9 @@ checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b"
 
 [[package]]
 name = "h2"
-version = "0.3.26"
+version = "0.4.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "81fe527a889e1532da5c525686d96d4c2e74cdd345badf8dfef9f6b39dd5f5e8"
+checksum = "816ec7294445779408f36fe57bc5b7fc1cf59664059096c65f905c1c61f58069"
 dependencies = [
  "bytes",
  "fnv",
@@ -1462,10 +1506,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8"
 
 [[package]]
+name = "heck"
+version = "0.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea"
+
+[[package]]
 name = "hermit-abi"
-version = "0.3.4"
+version = "0.3.9"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5d3d0e0f38255e7fa3cf31335b3a56f05febd18025f4db5ef7a0cfb4f8da651f"
+checksum = "d231dfb89cfffdbc30e7fc41579ed6066ad03abda9e567ccafae602b97ec5024"
 
 [[package]]
 name = "hex"
@@ -1490,9 +1540,9 @@ dependencies = [
 
 [[package]]
 name = "http"
-version = "0.2.11"
+version = "1.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8947b1a6fad4393052c7ba1f4cd97bed3e953a95c79c92ad9b051a04611d9fbb"
+checksum = "21b9ddb458710bc376481b842f5da65cdf31522de232c1ca8146abce2a358258"
 dependencies = [
  "bytes",
  "fnv",
@@ -1501,12 +1551,24 @@ dependencies = [
 
 [[package]]
 name = "http-body"
-version = "0.4.6"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1cac85db508abc24a2e48553ba12a996e87244a0395ce011e62b37158745d643"
+dependencies = [
+ "bytes",
+ "http",
+]
+
+[[package]]
+name = "http-body-util"
+version = "0.1.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7ceab25649e9960c0311ea418d17bee82c0dcec1bd053b5f9a66e265a693bed2"
+checksum = "0475f8b2ac86659c21b64320d5d653f9efe42acd2a4e560073ec61a155a34f1d"
 dependencies = [
  "bytes",
+ "futures-core",
  "http",
+ "http-body",
  "pin-project-lite",
 ]
 
@@ -1530,13 +1592,12 @@ checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4"
 
 [[package]]
 name = "hyper"
-version = "0.14.28"
+version = "1.4.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bf96e135eb83a2a8ddf766e426a841d8ddd7449d5f00d34ea02b41d2f19eef80"
+checksum = "50dfd22e0e76d0f662d429a5f80fcaf3855009297eab6a0a9f8543834744ba05"
 dependencies = [
  "bytes",
  "futures-channel",
- "futures-core",
  "futures-util",
  "h2",
  "http",
@@ -1545,38 +1606,60 @@ dependencies = [
  "httpdate",
  "itoa",
  "pin-project-lite",
- "socket2",
+ "smallvec",
  "tokio",
- "tower-service",
- "tracing",
  "want",
 ]
 
 [[package]]
 name = "hyper-rustls"
-version = "0.24.2"
+version = "0.27.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ec3efd23720e2049821a693cbc7e65ea87c72f1c58ff2f9522ff332b1491e590"
+checksum = "5ee4be2c948921a1a5320b629c4193916ed787a7f7f293fd3f7f5a6c9de74155"
 dependencies = [
  "futures-util",
  "http",
  "hyper",
- "rustls 0.21.12",
- "rustls-native-certs 0.6.3",
+ "hyper-util",
+ "rustls",
+ "rustls-native-certs",
+ "rustls-pki-types",
  "tokio",
- "tokio-rustls 0.24.1",
+ "tokio-rustls",
+ "tower-service",
 ]
 
 [[package]]
 name = "hyper-timeout"
-version = "0.4.1"
+version = "0.5.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3203a961e5c83b6f5498933e78b6b263e208c197b63e9c6c53cc82ffd3f63793"
+dependencies = [
+ "hyper",
+ "hyper-util",
+ "pin-project-lite",
+ "tokio",
+ "tower-service",
+]
+
+[[package]]
+name = "hyper-util"
+version = "0.1.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bbb958482e8c7be4bc3cf272a766a2b0bf1a6755e7a6ae777f017a31d11b13b1"
+checksum = "cde7055719c54e36e95e8719f95883f22072a48ede39db7fc17a4e1d5281e9b9"
 dependencies = [
+ "bytes",
+ "futures-channel",
+ "futures-util",
+ "http",
+ "http-body",
  "hyper",
  "pin-project-lite",
+ "socket2",
  "tokio",
- "tokio-io-timeout",
+ "tower",
+ "tower-service",
+ "tracing",
 ]
 
 [[package]]
@@ -1590,7 +1673,7 @@ dependencies = [
  "iana-time-zone-haiku",
  "js-sys",
  "wasm-bindgen",
- "windows-core",
+ "windows-core 0.52.0",
 ]
 
 [[package]]
@@ -1619,30 +1702,6 @@ dependencies = [
 ]
 
 [[package]]
-name = "imbl"
-version = "2.0.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "978d142c8028edf52095703af2fad11d6f611af1246685725d6b850634647085"
-dependencies = [
- "bitmaps",
- "imbl-sized-chunks",
- "proptest",
- "rand_core",
- "rand_xoshiro",
- "serde",
- "version_check",
-]
-
-[[package]]
-name = "imbl-sized-chunks"
-version = "0.1.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "144006fb58ed787dcae3f54575ff4349755b00ccc99f4b4873860b654be1ed63"
-dependencies = [
- "bitmaps",
-]
-
-[[package]]
 name = "indexmap"
 version = "1.9.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -1665,6 +1724,20 @@ dependencies = [
 ]
 
 [[package]]
+name = "indicatif"
+version = "0.17.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "763a5a8f45087d6bcea4222e7b72c291a054edf80e4ef6efd2a4979878c7bea3"
+dependencies = [
+ "console",
+ "instant",
+ "number_prefix",
+ "portable-atomic",
+ "unicode-width",
+ "vt100",
+]
+
+[[package]]
 name = "instant"
 version = "0.1.12"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -1697,6 +1770,12 @@ dependencies = [
 ]
 
 [[package]]
+name = "is_terminal_polyfill"
+version = "1.70.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf"
+
+[[package]]
 name = "itertools"
 version = "0.10.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -1707,18 +1786,18 @@ dependencies = [
 
 [[package]]
 name = "itertools"
-version = "0.11.0"
+version = "0.12.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b1c173a5686ce8bfa551b3563d0c2170bf24ca44da99c7ca4bfdab5418c3fe57"
+checksum = "ba291022dbbd398a455acf126c1e341954079855bc60dfdda641363bd6922569"
 dependencies = [
  "either",
 ]
 
 [[package]]
 name = "itertools"
-version = "0.12.0"
+version = "0.13.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "25db6b064527c5d482d0423354fcd07a89a2dfe07b67892e62411946db7f07b0"
+checksum = "413ee7dfc52ee1a4949ceeb7dbc8a33f2d6c088194d9f922fb8318faf1f01186"
 dependencies = [
  "either",
 ]
@@ -1731,9 +1810,9 @@ checksum = "b1a46d1a171d865aa5f83f92695765caa047a9b4cbae2cbf37dbd613a793fd4c"
 
 [[package]]
 name = "jobserver"
-version = "0.1.27"
+version = "0.1.32"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8c37f63953c4c63420ed5fd3d6d398c719489b9f872b9fa683262f8edd363c7d"
+checksum = "48d1dbcbbeb6a7fec7e059840aa538bd62aaccf972c7346c4d9d2059312853d0"
 dependencies = [
  "libc",
 ]
@@ -1749,9 +1828,9 @@ dependencies = [
 
 [[package]]
 name = "lazy_static"
-version = "1.4.0"
+version = "1.5.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
+checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe"
 
 [[package]]
 name = "lexical-core"
@@ -1819,9 +1898,9 @@ dependencies = [
 
 [[package]]
 name = "libc"
-version = "0.2.152"
+version = "0.2.158"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "13e3bf6590cbc649f4d1a3eefc9d5d6eb746f5200ffb04e5e142700b8faa56e7"
+checksum = "d8adc4bb1803a324070e64a98ae98f38934d91957a99cfb3a43dcbc01bc56439"
 
 [[package]]
 name = "libm"
@@ -1830,21 +1909,31 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "4ec2a862134d2a7d32d7983ddcdd1c4923530833c9f2ea1a44fc5fa473989058"
 
 [[package]]
+name = "libmimalloc-sys"
+version = "0.1.39"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "23aa6811d3bd4deb8a84dde645f943476d13b248d818edcf8ce0b2f37f036b44"
+dependencies = [
+ "cc",
+ "libc",
+]
+
+[[package]]
 name = "libredox"
 version = "0.0.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "85c833ca1e66078851dba29046874e38f08b2c883700aa29a03ddd3b23814ee8"
 dependencies = [
- "bitflags 2.4.2",
+ "bitflags 2.6.0",
  "libc",
  "redox_syscall 0.4.1",
 ]
 
 [[package]]
 name = "linux-raw-sys"
-version = "0.4.13"
+version = "0.4.14"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "01cda141df6706de531b6c46c3a33ecca755538219bd484262fa09410c13539c"
+checksum = "78b3ae25bc7c8c38cec158d1f2757ee79e9b3740fbc7ccf0e59e4b08d793fa89"
 
 [[package]]
 name = "litrs"
@@ -1869,10 +1958,23 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "b5e6163cb8c49088c2c36f57875e58ccd8c87c7427f7fbd50ea6710b2f3f2e8f"
 
 [[package]]
+name = "loom"
+version = "0.7.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "419e0dc8046cb947daa77eb95ae174acfbddb7673b4151f56d1eed8e93fbfaca"
+dependencies = [
+ "cfg-if",
+ "generator",
+ "scoped-tls",
+ "tracing",
+ "tracing-subscriber",
+]
+
+[[package]]
 name = "lru"
-version = "0.12.3"
+version = "0.12.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d3262e75e648fce39813cb56ac41f3c3e3f65217ebf3844d818d1f9398cfb0dc"
+checksum = "37ee39891760e7d94734f6f63fedc29a2e4a152f836120753a72503f09fcf904"
 dependencies = [
  "hashbrown 0.14.3",
 ]
@@ -1894,7 +1996,7 @@ version = "0.16.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "a200ae03df8c3dce7a963f6eeaac8feb41bf9001cb7e5ab22e3205aec2f0373d"
 dependencies = [
- "bitflags 2.4.2",
+ "bitflags 2.6.0",
  "libc",
  "magic-sys",
  "thiserror",
@@ -1926,6 +2028,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "0e7465ac9959cc2b1404e8e2367b43684a6d13790fe23056cc8c6c5a6b7bcb94"
 
 [[package]]
+name = "matchit"
+version = "0.8.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "47e1ffaa40ddd1f3ed91f717a33c8c0ee23fff369e3aa8772b9605cc1d22f4c3"
+
+[[package]]
 name = "md-5"
 version = "0.10.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -1960,6 +2068,15 @@ dependencies = [
 ]
 
 [[package]]
+name = "mimalloc"
+version = "0.1.43"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "68914350ae34959d83f732418d51e2427a794055d0b9529f48259ac07af65633"
+dependencies = [
+ "libmimalloc-sys",
+]
+
+[[package]]
 name = "mime"
 version = "0.3.17"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -1993,12 +2110,59 @@ dependencies = [
 ]
 
 [[package]]
+name = "mio"
+version = "1.0.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "80e04d1dcff3aae0704555fe5fee3bcfaf3d1fdf8a7e521d5b9d2b42acb52cec"
+dependencies = [
+ "hermit-abi",
+ "libc",
+ "wasi",
+ "windows-sys 0.52.0",
+]
+
+[[package]]
 name = "multimap"
 version = "0.8.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "e5ce46fe64a9d73be07dcbe690a38ce1b293be448fd8ce1e6c1b8062c9f72c6a"
 
 [[package]]
+name = "nar-bridge"
+version = "0.1.0"
+dependencies = [
+ "axum",
+ "bytes",
+ "clap",
+ "data-encoding",
+ "futures",
+ "hex-literal",
+ "itertools 0.12.1",
+ "lru",
+ "mimalloc",
+ "nix-compat",
+ "parking_lot 0.12.3",
+ "prost",
+ "prost-build",
+ "rstest",
+ "serde",
+ "thiserror",
+ "tokio",
+ "tokio-listener",
+ "tokio-util",
+ "tonic",
+ "tonic-build",
+ "tower",
+ "tower-http",
+ "tracing",
+ "tracing-subscriber",
+ "tvix-castore",
+ "tvix-store",
+ "tvix-tracing",
+ "url",
+]
+
+[[package]]
 name = "nibble_vec"
 version = "0.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -2048,7 +2212,7 @@ version = "0.27.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "2eb04e9c688eff1c89d72b407f168cf79bb9e867a9d3323ed6c01519eb9cc053"
 dependencies = [
- "bitflags 2.4.2",
+ "bitflags 2.6.0",
  "cfg-if",
  "libc",
 ]
@@ -2057,8 +2221,9 @@ dependencies = [
 name = "nix-compat"
 version = "0.1.0"
 dependencies = [
- "bitflags 2.4.2",
+ "bitflags 2.6.0",
  "bstr",
+ "bytes",
  "criterion",
  "data-encoding",
  "ed25519",
@@ -2068,6 +2233,8 @@ dependencies = [
  "glob",
  "hex-literal",
  "lazy_static",
+ "mimalloc",
+ "nix-compat-derive",
  "nom",
  "num-traits",
  "pin-project-lite",
@@ -2076,13 +2243,50 @@ dependencies = [
  "serde",
  "serde_json",
  "sha2",
+ "smol_str",
  "thiserror",
  "tokio",
  "tokio-test",
+ "tracing",
  "zstd",
 ]
 
 [[package]]
+name = "nix-compat-derive"
+version = "0.1.0"
+dependencies = [
+ "hex-literal",
+ "nix-compat",
+ "pretty_assertions",
+ "proc-macro2",
+ "quote",
+ "rstest",
+ "syn 2.0.76",
+ "tokio",
+ "tokio-test",
+]
+
+[[package]]
+name = "nix-compat-derive-tests"
+version = "0.1.0"
+dependencies = [
+ "hex-literal",
+ "nix-compat",
+ "nix-compat-derive",
+ "pretty_assertions",
+ "rstest",
+ "tokio",
+ "tokio-test",
+ "trybuild",
+]
+
+[[package]]
+name = "nohash-hasher"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2bf50223579dc7cdcfb3bfcacf7069ff68243f8c363f62ffa99cf000a6b9c451"
+
+[[package]]
 name = "nom"
 version = "7.1.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -2119,9 +2323,9 @@ checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9"
 
 [[package]]
 name = "num-traits"
-version = "0.2.18"
+version = "0.2.19"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "da0df0e5185db44f69b44f26786fe401b6c293d1907744beaa7fa62b2e5a517a"
+checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841"
 dependencies = [
  "autocfg",
  "libm",
@@ -2138,6 +2342,12 @@ dependencies = [
 ]
 
 [[package]]
+name = "number_prefix"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "830b246a0e5f20af87141b25c173cd1b609bd7779a4617d6ec582abaf90870f3"
+
+[[package]]
 name = "object"
 version = "0.32.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -2148,26 +2358,26 @@ dependencies = [
 
 [[package]]
 name = "object_store"
-version = "0.9.1"
+version = "0.10.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b8718f8b65fdf67a45108d1548347d4af7d71fb81ce727bbf9e3b2535e079db3"
+checksum = "e6da452820c715ce78221e8202ccc599b4a52f3e1eb3eedb487b680c81a8e3f3"
 dependencies = [
  "async-trait",
- "base64",
+ "base64 0.22.1",
  "bytes",
  "chrono",
  "futures",
  "humantime",
  "hyper",
- "itertools 0.12.0",
+ "itertools 0.13.0",
  "md-5",
- "parking_lot 0.12.2",
+ "parking_lot 0.12.3",
  "percent-encoding",
  "quick-xml",
  "rand",
  "reqwest",
  "ring",
- "rustls-pemfile 2.1.0",
+ "rustls-pemfile",
  "serde",
  "serde_json",
  "snafu",
@@ -2211,18 +2421,43 @@ dependencies = [
 ]
 
 [[package]]
+name = "opentelemetry"
+version = "0.24.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4c365a63eec4f55b7efeceb724f1336f26a9cf3427b70e59e2cd2a5b947fba96"
+dependencies = [
+ "futures-core",
+ "futures-sink",
+ "js-sys",
+ "once_cell",
+ "pin-project-lite",
+ "thiserror",
+]
+
+[[package]]
+name = "opentelemetry-http"
+version = "0.13.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ad31e9de44ee3538fb9d64fe3376c1362f406162434609e79aea2a41a0af78ab"
+dependencies = [
+ "async-trait",
+ "bytes",
+ "http",
+ "opentelemetry 0.24.0",
+]
+
+[[package]]
 name = "opentelemetry-otlp"
-version = "0.15.0"
+version = "0.17.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1a016b8d9495c639af2145ac22387dcb88e44118e45320d9238fbf4e7889abcb"
+checksum = "6b925a602ffb916fb7421276b86756027b37ee708f9dce2dbdcc51739f07e727"
 dependencies = [
  "async-trait",
  "futures-core",
  "http",
- "opentelemetry",
+ "opentelemetry 0.24.0",
  "opentelemetry-proto",
- "opentelemetry-semantic-conventions",
- "opentelemetry_sdk",
+ "opentelemetry_sdk 0.24.1",
  "prost",
  "thiserror",
  "tokio",
@@ -2231,23 +2466,17 @@ dependencies = [
 
 [[package]]
 name = "opentelemetry-proto"
-version = "0.5.0"
+version = "0.7.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3a8fddc9b68f5b80dae9d6f510b88e02396f006ad48cac349411fbecc80caae4"
+checksum = "30ee9f20bff9c984511a02f082dc8ede839e4a9bf15cc2487c8d6fea5ad850d9"
 dependencies = [
- "opentelemetry",
- "opentelemetry_sdk",
+ "opentelemetry 0.24.0",
+ "opentelemetry_sdk 0.24.1",
  "prost",
  "tonic",
 ]
 
 [[package]]
-name = "opentelemetry-semantic-conventions"
-version = "0.14.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f9ab5bd6c42fb9349dcf28af2ba9a0667f697f9bdcca045d39f2cec5543e2910"
-
-[[package]]
 name = "opentelemetry_sdk"
 version = "0.22.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -2260,11 +2489,30 @@ dependencies = [
  "futures-util",
  "glob",
  "once_cell",
- "opentelemetry",
+ "opentelemetry 0.22.0",
  "ordered-float",
  "percent-encoding",
  "rand",
  "thiserror",
+]
+
+[[package]]
+name = "opentelemetry_sdk"
+version = "0.24.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "692eac490ec80f24a17828d49b40b60f5aeaccdfe6a503f939713afd22bc28df"
+dependencies = [
+ "async-trait",
+ "futures-channel",
+ "futures-executor",
+ "futures-util",
+ "glob",
+ "once_cell",
+ "opentelemetry 0.24.0",
+ "percent-encoding",
+ "rand",
+ "serde_json",
+ "thiserror",
  "tokio",
  "tokio-stream",
 ]
@@ -2312,9 +2560,9 @@ dependencies = [
 
 [[package]]
 name = "parking_lot"
-version = "0.12.2"
+version = "0.12.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7e4af0ca4f6caed20e900d564c242b8e5d4903fdacf31d3daf527b66fe6f42fb"
+checksum = "f1bf18183cf54e8d6059647fc3063646a1801cf30896933ec2311622cc4b9a27"
 dependencies = [
  "lock_api",
  "parking_lot_core 0.9.9",
@@ -2361,9 +2609,9 @@ checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e"
 
 [[package]]
 name = "petgraph"
-version = "0.6.4"
+version = "0.6.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e1d3afd2628e69da2be385eb6f2fd57c8ac7977ceeff6dc166ff1657b0e386a9"
+checksum = "b4c5cc86750666a3ed20bdaf5ca2a0344f9c67674cae0515bec2da16fbaa47db"
 dependencies = [
  "fixedbitset",
  "indexmap 2.1.0",
@@ -2386,14 +2634,14 @@ checksum = "4359fd9c9171ec6e8c62926d6faaf553a8dc3f64e1507e76da7911b4f6a04405"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.48",
+ "syn 2.0.76",
 ]
 
 [[package]]
 name = "pin-project-lite"
-version = "0.2.13"
+version = "0.2.14"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8afb450f006bf6385ca15ef45d71d2288452bc3683ce2e2cacc0d18e4be60b58"
+checksum = "bda66fc9667c18cb2758a2ac84d1167245054bcf85d5d1aaa6923f45801bdd02"
 
 [[package]]
 name = "pin-utils"
@@ -2477,6 +2725,12 @@ dependencies = [
 ]
 
 [[package]]
+name = "portable-atomic"
+version = "1.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7170ef9988bc169ba16dd36a7fa041e5c4cbeb6a35b76d4c03daded371eae7c0"
+
+[[package]]
 name = "powerfmt"
 version = "0.2.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -2505,43 +2759,40 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "a41cf62165e97c7f814d2221421dbb9afcbcdb0a88068e5ea206e19951c2cbb5"
 dependencies = [
  "proc-macro2",
- "syn 2.0.48",
+ "syn 2.0.76",
 ]
 
 [[package]]
 name = "proc-macro2"
-version = "1.0.76"
+version = "1.0.86"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "95fc56cda0b5c3325f5fbbd7ff9fda9e02bb00bb3dac51252d2f1bfa1cb8cc8c"
+checksum = "5e719e8df665df0d1c8fbfd238015744736151d4445ec0836b8e628aae103b77"
 dependencies = [
  "unicode-ident",
 ]
 
 [[package]]
 name = "proptest"
-version = "1.4.0"
+version = "1.5.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "31b476131c3c86cb68032fdc5cb6d5a1045e3e42d96b69fa599fd77701e1f5bf"
+checksum = "b4c2511913b88df1637da85cc8d96ec8e43a3f8bb8ccb71ee1ac240d6f3df58d"
 dependencies = [
- "bit-set",
- "bit-vec",
- "bitflags 2.4.2",
+ "bitflags 2.6.0",
  "lazy_static",
  "num-traits",
  "rand",
  "rand_chacha",
  "rand_xorshift",
  "regex-syntax 0.8.2",
- "rusty-fork",
  "tempfile",
  "unarray",
 ]
 
 [[package]]
 name = "prost"
-version = "0.12.3"
+version = "0.13.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "146c289cda302b98a28d40c8b3b90498d6e526dd24ac2ecea73e4e491685b94a"
+checksum = "e13db3d3fde688c61e2446b4d843bc27a7e8af269a69440c0308021dc92333cc"
 dependencies = [
  "bytes",
  "prost-derive",
@@ -2549,13 +2800,13 @@ dependencies = [
 
 [[package]]
 name = "prost-build"
-version = "0.12.3"
+version = "0.13.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c55e02e35260070b6f716a2423c2ff1c3bb1642ddca6f99e1f26d06268a0e2d2"
+checksum = "5bb182580f71dd070f88d01ce3de9f4da5021db7115d2e1c3605a754153b77c1"
 dependencies = [
  "bytes",
- "heck",
- "itertools 0.11.0",
+ "heck 0.4.1",
+ "itertools 0.10.5",
  "log",
  "multimap",
  "once_cell",
@@ -2566,38 +2817,37 @@ dependencies = [
  "pulldown-cmark",
  "pulldown-cmark-to-cmark",
  "regex",
- "syn 2.0.48",
+ "syn 2.0.76",
  "tempfile",
- "which 4.4.2",
 ]
 
 [[package]]
 name = "prost-derive"
-version = "0.12.3"
+version = "0.13.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "efb6c9a1dd1def8e2124d17e83a20af56f1570d6c2d2bd9e266ccb768df3840e"
+checksum = "18bec9b0adc4eba778b33684b7ba3e7137789434769ee3ce3930463ef904cfca"
 dependencies = [
  "anyhow",
- "itertools 0.11.0",
+ "itertools 0.10.5",
  "proc-macro2",
  "quote",
- "syn 2.0.48",
+ "syn 2.0.76",
 ]
 
 [[package]]
 name = "prost-types"
-version = "0.12.3"
+version = "0.13.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "193898f59edcf43c26227dcd4c8427f00d99d61e95dcde58dabd49fa291d470e"
+checksum = "cee5168b05f49d4b0ca581206eb14a7b22fafd963efe729ac48eb03266e25cc2"
 dependencies = [
  "prost",
 ]
 
 [[package]]
 name = "prost-wkt"
-version = "0.5.0"
+version = "0.6.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4d8ef9c3f0f1dab910d2b7e2c24a8e4322e122eba6d7a1921eeebcebbc046c40"
+checksum = "a8d84e2bee181b04c2bac339f2bfe818c46a99750488cc6728ce4181d5aa8299"
 dependencies = [
  "chrono",
  "inventory",
@@ -2610,11 +2860,11 @@ dependencies = [
 
 [[package]]
 name = "prost-wkt-build"
-version = "0.5.0"
+version = "0.6.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5b31cae9a54ca84fee1504740a82eebf2479532905e106f63ca0c3bc8d780321"
+checksum = "8a669d5acbe719010c6f62a64e6d7d88fdedc1fe46e419747949ecb6312e9b14"
 dependencies = [
- "heck",
+ "heck 0.4.1",
  "prost",
  "prost-build",
  "prost-types",
@@ -2623,9 +2873,9 @@ dependencies = [
 
 [[package]]
 name = "prost-wkt-types"
-version = "0.5.0"
+version = "0.6.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "435be4a8704091b4c5fb1d79799de7f2dbff53af05edf29385237f8cf7ab37ee"
+checksum = "01ef068e9b82e654614b22e6b13699bd545b6c0e2e721736008b00b38aeb4f64"
 dependencies = [
  "chrono",
  "prost",
@@ -2645,7 +2895,7 @@ version = "0.9.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "57206b407293d2bcd3af849ce869d52068623f19e1b5ff8e8778e3309439682b"
 dependencies = [
- "bitflags 2.4.2",
+ "bitflags 2.6.0",
  "memchr",
  "unicase",
 ]
@@ -2660,26 +2910,67 @@ dependencies = [
 ]
 
 [[package]]
-name = "quick-error"
-version = "1.2.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0"
-
-[[package]]
 name = "quick-xml"
-version = "0.31.0"
+version = "0.36.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1004a344b30a54e2ee58d66a71b32d2db2feb0a31f9a2d302bf0536f15de2a33"
+checksum = "96a05e2e8efddfa51a84ca47cec303fac86c8541b686d37cac5efc0e094417bc"
 dependencies = [
  "memchr",
  "serde",
 ]
 
 [[package]]
+name = "quinn"
+version = "0.11.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e4ceeeeabace7857413798eb1ffa1e9c905a9946a57d81fb69b4b71c4d8eb3ad"
+dependencies = [
+ "bytes",
+ "pin-project-lite",
+ "quinn-proto",
+ "quinn-udp",
+ "rustc-hash 1.1.0",
+ "rustls",
+ "thiserror",
+ "tokio",
+ "tracing",
+]
+
+[[package]]
+name = "quinn-proto"
+version = "0.11.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ddf517c03a109db8100448a4be38d498df8a210a99fe0e1b9eaf39e78c640efe"
+dependencies = [
+ "bytes",
+ "rand",
+ "ring",
+ "rustc-hash 1.1.0",
+ "rustls",
+ "slab",
+ "thiserror",
+ "tinyvec",
+ "tracing",
+]
+
+[[package]]
+name = "quinn-udp"
+version = "0.5.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9096629c45860fc7fb143e125eb826b5e721e10be3263160c7d60ca832cf8c46"
+dependencies = [
+ "libc",
+ "once_cell",
+ "socket2",
+ "tracing",
+ "windows-sys 0.52.0",
+]
+
+[[package]]
 name = "quote"
-version = "1.0.35"
+version = "1.0.37"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "291ec9ab5efd934aaf503a6466c5d5251535d108ee747472c3977cc5acc868ef"
+checksum = "b5b9d34b8991d19d98081b46eacdd8eb58c6f2b201139f7c5f643cc155a633af"
 dependencies = [
  "proc-macro2",
 ]
@@ -2734,15 +3025,6 @@ dependencies = [
 ]
 
 [[package]]
-name = "rand_xoshiro"
-version = "0.6.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6f97cdb2a36ed4183de61b2f824cc45c9f1037f28afe0a322e9fff4c108b5aaa"
-dependencies = [
- "rand_core",
-]
-
-[[package]]
 name = "rayon"
 version = "1.8.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -2763,6 +3045,15 @@ dependencies = [
 ]
 
 [[package]]
+name = "redb"
+version = "2.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "58323dc32ea52a8ae105ff94bc0460c5d906307533ba3401aa63db3cbe491fe5"
+dependencies = [
+ "libc",
+]
+
+[[package]]
 name = "redox_syscall"
 version = "0.2.16"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -2802,13 +3093,13 @@ dependencies = [
 
 [[package]]
 name = "regex"
-version = "1.10.2"
+version = "1.10.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "380b951a9c5e80ddfd6136919eef32310721aa4aacd4889a8d39124b026ab343"
+checksum = "4219d74c6b67a3654a9fbebc4b419e22126d13d2f3c4a07ee0cb61ff79a79619"
 dependencies = [
  "aho-corasick",
  "memchr",
- "regex-automata 0.4.3",
+ "regex-automata 0.4.7",
  "regex-syntax 0.8.2",
 ]
 
@@ -2823,9 +3114,9 @@ dependencies = [
 
 [[package]]
 name = "regex-automata"
-version = "0.4.3"
+version = "0.4.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5f804c7828047e88b2d32e2d7fe5a105da8ee3264f01902f796c8e067dc2483f"
+checksum = "38caf58cc5ef2fed281f89292ef23f6365465ed9a41b7a7754eb4e26496c92df"
 dependencies = [
  "aho-corasick",
  "memchr",
@@ -2852,20 +3143,21 @@ checksum = "e898588f33fdd5b9420719948f9f2a32c922a246964576f71ba7f24f80610fbc"
 
 [[package]]
 name = "reqwest"
-version = "0.11.23"
+version = "0.12.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "37b1ae8d9ac08420c66222fb9096fc5de435c3c48542bc5336c51892cffafb41"
+checksum = "f8f4955649ef5c38cc7f9e8aa41761d48fb9677197daea9984dc54f56aad5e63"
 dependencies = [
- "base64",
+ "base64 0.22.1",
  "bytes",
- "encoding_rs",
  "futures-core",
  "futures-util",
  "h2",
  "http",
  "http-body",
+ "http-body-util",
  "hyper",
  "hyper-rustls",
+ "hyper-util",
  "ipnet",
  "js-sys",
  "log",
@@ -2873,15 +3165,17 @@ dependencies = [
  "once_cell",
  "percent-encoding",
  "pin-project-lite",
- "rustls 0.21.12",
- "rustls-native-certs 0.6.3",
- "rustls-pemfile 1.0.4",
+ "quinn",
+ "rustls",
+ "rustls-native-certs",
+ "rustls-pemfile",
+ "rustls-pki-types",
  "serde",
  "serde_json",
  "serde_urlencoded",
- "system-configuration",
+ "sync_wrapper 1.0.1",
  "tokio",
- "tokio-rustls 0.24.1",
+ "tokio-rustls",
  "tokio-util",
  "tower-service",
  "url",
@@ -2889,7 +3183,40 @@ dependencies = [
  "wasm-bindgen-futures",
  "wasm-streams",
  "web-sys",
- "winreg",
+ "windows-registry",
+]
+
+[[package]]
+name = "reqwest-middleware"
+version = "0.3.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "562ceb5a604d3f7c885a792d42c199fd8af239d0a51b2fa6a78aafa092452b04"
+dependencies = [
+ "anyhow",
+ "async-trait",
+ "http",
+ "reqwest",
+ "serde",
+ "thiserror",
+ "tower-service",
+]
+
+[[package]]
+name = "reqwest-tracing"
+version = "0.5.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bfdd9bfa64c72233d8dd99ab7883efcdefe9e16d46488ecb9228b71a2e2ceb45"
+dependencies = [
+ "anyhow",
+ "async-trait",
+ "getrandom",
+ "http",
+ "matchit 0.8.4",
+ "opentelemetry 0.22.0",
+ "reqwest",
+ "reqwest-middleware",
+ "tracing",
+ "tracing-opentelemetry 0.23.0",
 ]
 
 [[package]]
@@ -2924,7 +3251,7 @@ dependencies = [
  "countme",
  "hashbrown 0.14.3",
  "memoffset 0.9.0",
- "rustc-hash",
+ "rustc-hash 1.1.0",
  "text-size",
 ]
 
@@ -2953,7 +3280,7 @@ dependencies = [
  "regex",
  "relative-path",
  "rustc_version",
- "syn 2.0.48",
+ "syn 2.0.76",
  "unicode-ident",
 ]
 
@@ -2966,7 +3293,7 @@ dependencies = [
  "quote",
  "rand",
  "rustc_version",
- "syn 2.0.48",
+ "syn 2.0.76",
 ]
 
 [[package]]
@@ -2982,6 +3309,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2"
 
 [[package]]
+name = "rustc-hash"
+version = "2.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "583034fd73374156e66797ed8e5b0d5690409c9226b22d87cb7f19821c05d152"
+
+[[package]]
 name = "rustc_version"
 version = "0.4.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -2992,11 +3325,11 @@ dependencies = [
 
 [[package]]
 name = "rustix"
-version = "0.38.30"
+version = "0.38.35"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "322394588aaf33c24007e8bb3238ee3e4c5c09c084ab32bc73890b99ff326bca"
+checksum = "a85d50532239da68e9addb745ba38ff4612a242c1c7ceea689c4bc7c2f43c36f"
 dependencies = [
- "bitflags 2.4.2",
+ "bitflags 2.6.0",
  "errno",
  "libc",
  "linux-raw-sys",
@@ -3005,50 +3338,27 @@ dependencies = [
 
 [[package]]
 name = "rustls"
-version = "0.21.12"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3f56a14d1f48b391359b22f731fd4bd7e43c97f3c50eee276f3aa09c94784d3e"
-dependencies = [
- "log",
- "ring",
- "rustls-webpki 0.101.7",
- "sct",
-]
-
-[[package]]
-name = "rustls"
-version = "0.22.4"
+version = "0.23.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bf4ef73721ac7bcd79b2b315da7779d8fc09718c6b3d2d1b2d94850eb8c18432"
+checksum = "ebbbdb961df0ad3f2652da8f3fdc4b36122f568f968f45ad3316f26c025c677b"
 dependencies = [
  "log",
+ "once_cell",
  "ring",
  "rustls-pki-types",
- "rustls-webpki 0.102.2",
+ "rustls-webpki",
  "subtle",
  "zeroize",
 ]
 
 [[package]]
 name = "rustls-native-certs"
-version = "0.6.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a9aace74cb666635c918e9c12bc0d348266037aa8eb599b5cba565709a8dff00"
-dependencies = [
- "openssl-probe",
- "rustls-pemfile 1.0.4",
- "schannel",
- "security-framework",
-]
-
-[[package]]
-name = "rustls-native-certs"
 version = "0.7.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "8f1fb85efa936c42c6d5fc28d2629bb51e4b2f4b8a5211e297d599cc5a093792"
 dependencies = [
  "openssl-probe",
- "rustls-pemfile 2.1.0",
+ "rustls-pemfile",
  "rustls-pki-types",
  "schannel",
  "security-framework",
@@ -3056,20 +3366,11 @@ dependencies = [
 
 [[package]]
 name = "rustls-pemfile"
-version = "1.0.4"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1c74cae0a4cf6ccbbf5f359f08efdf8ee7e1dc532573bf0db71968cb56b1448c"
-dependencies = [
- "base64",
-]
-
-[[package]]
-name = "rustls-pemfile"
 version = "2.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "3c333bb734fcdedcea57de1602543590f545f127dc8b533324318fd492c5c70b"
 dependencies = [
- "base64",
+ "base64 0.21.7",
  "rustls-pki-types",
 ]
 
@@ -3081,16 +3382,6 @@ checksum = "5ede67b28608b4c60685c7d54122d4400d90f62b40caee7700e700380a390fa8"
 
 [[package]]
 name = "rustls-webpki"
-version = "0.101.7"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8b6275d1ee7a1cd780b64aca7726599a1dbc893b1e64144529e55c3c2f745765"
-dependencies = [
- "ring",
- "untrusted",
-]
-
-[[package]]
-name = "rustls-webpki"
 version = "0.102.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "faaa0a62740bedb9b2ef5afa303da42764c012f743917351dc9a237ea1663610"
@@ -3107,18 +3398,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "7ffc183a10b4478d04cbbbfc96d0873219d962dd5accaff2ffbd4ceb7df837f4"
 
 [[package]]
-name = "rusty-fork"
-version = "0.3.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cb3dcc6e454c328bb824492db107ab7c0ae8fcffe4ad210136ef014458c1bc4f"
-dependencies = [
- "fnv",
- "quick-error",
- "tempfile",
- "wait-timeout",
-]
-
-[[package]]
 name = "rustyline"
 version = "10.1.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -3166,20 +3445,16 @@ dependencies = [
 ]
 
 [[package]]
-name = "scopeguard"
-version = "1.2.0"
+name = "scoped-tls"
+version = "1.0.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49"
+checksum = "e1cf6437eb19a8f4a6cc0f7dca544973b0b78843adbfeb3683d1a94a0024a294"
 
 [[package]]
-name = "sct"
-version = "0.7.1"
+name = "scopeguard"
+version = "1.2.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "da046153aa2352493d6cb7da4b6e5c0c057d8a1d0a9aa8560baffdd945acd414"
-dependencies = [
- "ring",
- "untrusted",
-]
+checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49"
 
 [[package]]
 name = "security-framework"
@@ -3212,22 +3487,22 @@ checksum = "b97ed7a9823b74f99c7742f5336af7be5ecd3eeafcb1507d1fa93347b1d589b0"
 
 [[package]]
 name = "serde"
-version = "1.0.197"
+version = "1.0.209"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3fb1c873e1b9b056a4dc4c0c198b24c3ffa059243875552b2bd0933b1aee4ce2"
+checksum = "99fce0ffe7310761ca6bf9faf5115afbc19688edd00171d81b1bb1b116c63e09"
 dependencies = [
  "serde_derive",
 ]
 
 [[package]]
 name = "serde_derive"
-version = "1.0.197"
+version = "1.0.209"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7eb0b34b42edc17f6b7cac84a52a1c5f0e1bb2227e997ca9011ea3dd34e8610b"
+checksum = "a5831b979fd7b5439637af1752d535ff49f4860c0f341d1baeb6faf0f4242170"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.48",
+ "syn 2.0.76",
 ]
 
 [[package]]
@@ -3242,6 +3517,16 @@ dependencies = [
 ]
 
 [[package]]
+name = "serde_path_to_error"
+version = "0.1.16"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "af99884400da37c88f5e9146b7f1fd0fbcae8f6eec4e9da38b67d05486f814a6"
+dependencies = [
+ "itoa",
+ "serde",
+]
+
+[[package]]
 name = "serde_qs"
 version = "0.12.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -3254,14 +3539,24 @@ dependencies = [
 
 [[package]]
 name = "serde_spanned"
-version = "0.6.5"
+version = "0.6.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "eb3622f419d1296904700073ea6cc23ad690adbd66f13ea683df73298736f0c1"
+checksum = "eb5b1b31579f3811bf615c144393417496f152e12ac8b7663bf664f4a815306d"
 dependencies = [
  "serde",
 ]
 
 [[package]]
+name = "serde_tagged"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "76cd248df2ce32924bfc2273e1af035ff3092b73253fe0567230b5c4154a99e9"
+dependencies = [
+ "erased-serde",
+ "serde",
+]
+
+[[package]]
 name = "serde_urlencoded"
 version = "0.7.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -3275,11 +3570,11 @@ dependencies = [
 
 [[package]]
 name = "serde_with"
-version = "3.7.0"
+version = "3.9.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ee80b0e361bbf88fd2f6e242ccd19cfda072cb0faa6ae694ecee08199938569a"
+checksum = "69cecfa94848272156ea67b2b1a53f20fc7bc638c4a46d2f8abde08f05f4b857"
 dependencies = [
- "base64",
+ "base64 0.22.1",
  "chrono",
  "hex",
  "indexmap 1.9.3",
@@ -3293,14 +3588,14 @@ dependencies = [
 
 [[package]]
 name = "serde_with_macros"
-version = "3.7.0"
+version = "3.9.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6561dc161a9224638a31d876ccdfefbc1df91d3f3a8342eddb35f055d48c7655"
+checksum = "a8fee4991ef4f274617a51ad4af30519438dacb2f56ac773b08a1922ff743350"
 dependencies = [
  "darling",
  "proc-macro2",
  "quote",
- "syn 2.0.48",
+ "syn 2.0.76",
 ]
 
 [[package]]
@@ -3335,6 +3630,12 @@ dependencies = [
 ]
 
 [[package]]
+name = "shlex"
+version = "1.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64"
+
+[[package]]
 name = "signal-hook-registry"
 version = "1.4.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -3385,9 +3686,9 @@ checksum = "e6ecd384b10a64542d77071bd64bd7b231f4ed5940fba55e98c3de13824cf3d7"
 
 [[package]]
 name = "smol_str"
-version = "0.2.1"
+version = "0.2.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e6845563ada680337a52d43bb0b29f396f2d911616f6573012645b9e3d048a49"
+checksum = "dd538fb6910ac1099850255cf94a94df6551fbdd602454387d0adb2d1ca6dead"
 dependencies = [
  "serde",
 ]
@@ -3408,7 +3709,7 @@ version = "0.7.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "990079665f075b699031e9c08fd3ab99be5029b96f3b78dc0709e8f77e4efebf"
 dependencies = [
- "heck",
+ "heck 0.4.1",
  "proc-macro2",
  "quote",
  "syn 1.0.109",
@@ -3459,6 +3760,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623"
 
 [[package]]
+name = "strsim"
+version = "0.11.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f"
+
+[[package]]
 name = "structmeta"
 version = "0.1.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -3500,9 +3807,9 @@ dependencies = [
 
 [[package]]
 name = "syn"
-version = "2.0.48"
+version = "2.0.76"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0f3531638e407dfc0814761abb7c00a5b54992b849452a0646b7f65c9f770f3f"
+checksum = "578e081a14e0cefc3279b0472138c513f37b41a08d5a3cca9b6e4e8ceb6cd525"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -3516,24 +3823,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160"
 
 [[package]]
-name = "system-configuration"
-version = "0.5.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ba3a3adc5c275d719af8cb4272ea1c4a6d668a777f37e115f6d11ddbc1c8e0e7"
-dependencies = [
- "bitflags 1.3.2",
- "core-foundation",
- "system-configuration-sys",
-]
-
-[[package]]
-name = "system-configuration-sys"
-version = "0.5.0"
+name = "sync_wrapper"
+version = "1.0.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a75fb188eb626b924683e3b95e3a48e63551fcfb51949de2f06a9d91dbee93c9"
+checksum = "a7065abeca94b6a8a577f9bd45aa0867a2238b74e8eb67cf10d492bc39351394"
 dependencies = [
- "core-foundation-sys",
- "libc",
+ "futures-core",
 ]
 
 [[package]]
@@ -3547,15 +3842,15 @@ dependencies = [
 
 [[package]]
 name = "tempfile"
-version = "3.9.0"
+version = "3.12.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "01ce4141aa927a6d1bd34a041795abd0db1cccba5d5f24b009f694bdf3a1f3fa"
+checksum = "04cbcdd0c794ebb0d4cf35e88edd2f7d2c4c3e9a5a6dab322839b321c6a87a64"
 dependencies = [
  "cfg-if",
  "fastrand",
- "redox_syscall 0.4.1",
+ "once_cell",
  "rustix",
- "windows-sys 0.52.0",
+ "windows-sys 0.59.0",
 ]
 
 [[package]]
@@ -3587,22 +3882,22 @@ checksum = "f18aa187839b2bdb1ad2fa35ead8c4c2976b64e4363c386d45ac0f7ee85c9233"
 
 [[package]]
 name = "thiserror"
-version = "1.0.56"
+version = "1.0.63"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d54378c645627613241d077a3a79db965db602882668f9136ac42af9ecb730ad"
+checksum = "c0342370b38b6a11b6cc11d6a805569958d54cfa061a29969c3b5ce2ea405724"
 dependencies = [
  "thiserror-impl",
 ]
 
 [[package]]
 name = "thiserror-impl"
-version = "1.0.56"
+version = "1.0.63"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fa0faa943b50f3db30a20aa7e265dbc66076993efed8463e8de414e5d06d3471"
+checksum = "a4558b58466b9ad7ca0f102865eccc95938dca1a74a856f2b57b6629050da261"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.48",
+ "syn 2.0.76",
 ]
 
 [[package]]
@@ -3616,10 +3911,19 @@ dependencies = [
 ]
 
 [[package]]
+name = "threadpool"
+version = "1.8.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d050e60b33d41c19108b32cea32164033a9013fe3b46cbd4457559bfbf77afaa"
+dependencies = [
+ "num_cpus",
+]
+
+[[package]]
 name = "time"
-version = "0.3.34"
+version = "0.3.36"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c8248b6521bb14bc45b4067159b9b6ad792e2d6d754d6c41fb50e29fefe38749"
+checksum = "5dfd88e563464686c916c7e46e623e520ddc6d79fa6641390f2e3fa86e83e885"
 dependencies = [
  "deranged",
  "itoa",
@@ -3638,9 +3942,9 @@ checksum = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3"
 
 [[package]]
 name = "time-macros"
-version = "0.2.17"
+version = "0.2.18"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7ba3a3ef41e6672a2f0f001392bb5dcd3ff0a9992d618ca761a11c3121547774"
+checksum = "3f252a68540fde3a3877aeea552b832b40ab9a69e318efd078774a01ddee1ccf"
 dependencies = [
  "num-conv",
  "time-core",
@@ -3673,59 +3977,54 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20"
 
 [[package]]
 name = "tokio"
-version = "1.35.1"
+version = "1.39.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c89b4efa943be685f629b149f53829423f8f5531ea21249408e8e2f8671ec104"
+checksum = "9babc99b9923bfa4804bd74722ff02c0381021eafa4db9949217e3be8e84fff5"
 dependencies = [
  "backtrace",
  "bytes",
  "libc",
- "mio",
- "num_cpus",
+ "mio 1.0.2",
  "pin-project-lite",
  "signal-hook-registry",
  "socket2",
  "tokio-macros",
- "windows-sys 0.48.0",
-]
-
-[[package]]
-name = "tokio-io-timeout"
-version = "1.2.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "30b74022ada614a1b4834de765f9bb43877f910cc8ce4be40e89042c9223a8bf"
-dependencies = [
- "pin-project-lite",
- "tokio",
+ "windows-sys 0.52.0",
 ]
 
 [[package]]
 name = "tokio-listener"
-version = "0.4.2"
+version = "0.4.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4134661e12ec11c6276be73544a43144a357b08dfab5c41fd226e15b5bc9a6b2"
+checksum = "914a439d123292125bc806649c396d23e1aac5da4052f0d97b23137b38782f46"
 dependencies = [
+ "axum",
+ "clap",
  "document-features",
  "futures-core",
  "futures-util",
+ "hyper",
+ "hyper-util",
  "nix 0.26.4",
  "pin-project",
  "socket2",
  "tokio",
  "tokio-util",
  "tonic",
+ "tower",
+ "tower-service",
  "tracing",
 ]
 
 [[package]]
 name = "tokio-macros"
-version = "2.2.0"
+version = "2.4.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5b8a1e28f2deaa14e508979454cb3a223b10b938b45af148bc0986de36f1923b"
+checksum = "693d596312e88961bc67d7f1f97af8a70227d9f90c31bba5806eec004978d752"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.48",
+ "syn 2.0.76",
 ]
 
 [[package]]
@@ -3741,30 +4040,20 @@ dependencies = [
 
 [[package]]
 name = "tokio-rustls"
-version = "0.24.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c28327cf380ac148141087fbfb9de9d7bd4e84ab5d2c28fbc911d753de8a7081"
-dependencies = [
- "rustls 0.21.12",
- "tokio",
-]
-
-[[package]]
-name = "tokio-rustls"
-version = "0.25.0"
+version = "0.26.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "775e0c0f0adb3a2f22a00c4745d728b479985fc15ee7ca6a2608388c5569860f"
+checksum = "0c7bc40d0e5a97695bb96e27995cd3a08538541b0a846f65bba7a359f36700d4"
 dependencies = [
- "rustls 0.22.4",
+ "rustls",
  "rustls-pki-types",
  "tokio",
 ]
 
 [[package]]
 name = "tokio-stream"
-version = "0.1.14"
+version = "0.1.15"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "397c988d37662c7dda6d2208364a706264bf3d6138b11d436cbac0ad38832842"
+checksum = "267ac89e0bec6e691e5813911606935d77c476ff49024f98abcea3e7b15e37af"
 dependencies = [
  "futures-core",
  "pin-project-lite",
@@ -3788,9 +4077,9 @@ dependencies = [
 
 [[package]]
 name = "tokio-test"
-version = "0.4.3"
+version = "0.4.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e89b3cbabd3ae862100094ae433e1def582cf86451b4e9bf83aa7ac1d8a7d719"
+checksum = "2468baabc3311435b55dd935f702f42cd1b8abb7e754fb7dfb16bd36aa88f9f7"
 dependencies = [
  "async-stream",
  "bytes",
@@ -3801,9 +4090,9 @@ dependencies = [
 
 [[package]]
 name = "tokio-util"
-version = "0.7.10"
+version = "0.7.11"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5419f34732d9eb6ee4c3578b7989078579b7f039cbbb9ca2c4da015749371e15"
+checksum = "9cf6b47b3771c49ac75ad09a6162f53ad4b8088b76ac60e8ec1455b31a189fe1"
 dependencies = [
  "bytes",
  "futures-core",
@@ -3811,7 +4100,6 @@ dependencies = [
  "futures-sink",
  "pin-project-lite",
  "tokio",
- "tracing",
 ]
 
 [[package]]
@@ -3822,8 +4110,20 @@ checksum = "4fb9d890e4dc9298b70f740f615f2e05b9db37dce531f6b24fb77ac993f9f217"
 dependencies = [
  "serde",
  "serde_spanned",
- "toml_datetime",
- "toml_edit",
+ "toml_datetime 0.5.1",
+ "toml_edit 0.18.1",
+]
+
+[[package]]
+name = "toml"
+version = "0.8.19"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a1ed1f98e3fdc28d6d910e6737ae6ab1a93bf1985935a1193e68f93eeb68d24e"
+dependencies = [
+ "serde",
+ "serde_spanned",
+ "toml_datetime 0.6.8",
+ "toml_edit 0.22.20",
 ]
 
 [[package]]
@@ -3836,6 +4136,15 @@ dependencies = [
 ]
 
 [[package]]
+name = "toml_datetime"
+version = "0.6.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0dd7358ecb8fc2f8d014bf86f6f638ce72ba252a2c3a2572f2a795f1d23efb41"
+dependencies = [
+ "serde",
+]
+
+[[package]]
 name = "toml_edit"
 version = "0.18.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -3845,33 +4154,48 @@ dependencies = [
  "nom8",
  "serde",
  "serde_spanned",
- "toml_datetime",
+ "toml_datetime 0.5.1",
+]
+
+[[package]]
+name = "toml_edit"
+version = "0.22.20"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "583c44c02ad26b0c3f3066fe629275e50627026c51ac2e595cca4c230ce1ce1d"
+dependencies = [
+ "indexmap 2.1.0",
+ "serde",
+ "serde_spanned",
+ "toml_datetime 0.6.8",
+ "winnow",
 ]
 
 [[package]]
 name = "tonic"
-version = "0.11.0"
+version = "0.12.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "76c4eb7a4e9ef9d4763600161f12f5070b92a578e1b634db88a6887844c91a13"
+checksum = "c6f6ba989e4b2c58ae83d862d3a3e27690b6e3ae630d0deb59f3697f32aa88ad"
 dependencies = [
  "async-stream",
  "async-trait",
  "axum",
- "base64",
+ "base64 0.22.1",
  "bytes",
  "h2",
  "http",
  "http-body",
+ "http-body-util",
  "hyper",
  "hyper-timeout",
+ "hyper-util",
  "percent-encoding",
  "pin-project",
  "prost",
- "rustls-native-certs 0.7.0",
- "rustls-pemfile 2.1.0",
- "rustls-pki-types",
+ "rustls-native-certs",
+ "rustls-pemfile",
+ "socket2",
  "tokio",
- "tokio-rustls 0.25.0",
+ "tokio-rustls",
  "tokio-stream",
  "tower",
  "tower-layer",
@@ -3881,22 +4205,35 @@ dependencies = [
 
 [[package]]
 name = "tonic-build"
-version = "0.11.0"
+version = "0.12.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "be4ef6dd70a610078cb4e338a0f79d06bc759ff1b22d2120c2ff02ae264ba9c2"
+checksum = "fe4ee8877250136bd7e3d2331632810a4df4ea5e004656990d8d66d2f5ee8a67"
 dependencies = [
  "prettyplease",
  "proc-macro2",
  "prost-build",
  "quote",
- "syn 2.0.48",
+ "syn 2.0.76",
+]
+
+[[package]]
+name = "tonic-health"
+version = "0.12.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ec0a34e6f706bae26b2b490e1da5c3f6a6ff87cae442bcbc7c881bab9631b5a7"
+dependencies = [
+ "async-stream",
+ "prost",
+ "tokio",
+ "tokio-stream",
+ "tonic",
 ]
 
 [[package]]
 name = "tonic-reflection"
-version = "0.11.0"
+version = "0.12.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "548c227bd5c0fae5925812c4ec6c66ffcfced23ea370cb823f4d18f0fc1cb6a7"
+checksum = "7b56b874eedb04f89907573b408eab1e87c1c1dce43aac6ad63742f57faa99ff"
 dependencies = [
  "prost",
  "prost-types",
@@ -3926,6 +4263,23 @@ dependencies = [
 ]
 
 [[package]]
+name = "tower-http"
+version = "0.5.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1e9cd434a998747dd2c4276bc96ee2e0c7a2eadf3cae88e52be55a05fa9053f5"
+dependencies = [
+ "bitflags 2.6.0",
+ "bytes",
+ "http",
+ "http-body",
+ "http-body-util",
+ "pin-project-lite",
+ "tower-layer",
+ "tower-service",
+ "tracing",
+]
+
+[[package]]
 name = "tower-layer"
 version = "0.3.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -3957,7 +4311,7 @@ checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.48",
+ "syn 2.0.76",
 ]
 
 [[package]]
@@ -3981,6 +4335,18 @@ dependencies = [
 ]
 
 [[package]]
+name = "tracing-indicatif"
+version = "0.3.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "069580424efe11d97c3fef4197fa98c004fa26672cc71ad8770d224e23b1951d"
+dependencies = [
+ "indicatif",
+ "tracing",
+ "tracing-core",
+ "tracing-subscriber",
+]
+
+[[package]]
 name = "tracing-log"
 version = "0.2.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -3999,8 +4365,26 @@ checksum = "a9be14ba1bbe4ab79e9229f7f89fab8d120b865859f10527f31c033e599d2284"
 dependencies = [
  "js-sys",
  "once_cell",
- "opentelemetry",
- "opentelemetry_sdk",
+ "opentelemetry 0.22.0",
+ "opentelemetry_sdk 0.22.1",
+ "smallvec",
+ "tracing",
+ "tracing-core",
+ "tracing-log",
+ "tracing-subscriber",
+ "web-time",
+]
+
+[[package]]
+name = "tracing-opentelemetry"
+version = "0.25.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a9784ed4da7d921bc8df6963f8c80a0e4ce34ba6ba76668acadd3edbd985ff3b"
+dependencies = [
+ "js-sys",
+ "once_cell",
+ "opentelemetry 0.24.0",
+ "opentelemetry_sdk 0.24.1",
  "smallvec",
  "tracing",
  "tracing-core",
@@ -4028,18 +4412,64 @@ dependencies = [
 ]
 
 [[package]]
+name = "tracing-tracy"
+version = "0.11.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c6a90519f16f55e5c62ffd5976349f10744435a919ecff83d918300575dfb69b"
+dependencies = [
+ "tracing-core",
+ "tracing-subscriber",
+ "tracy-client",
+]
+
+[[package]]
+name = "tracy-client"
+version = "0.17.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "59fb931a64ff88984f86d3e9bcd1ae8843aa7fe44dd0f8097527bc172351741d"
+dependencies = [
+ "loom",
+ "once_cell",
+ "tracy-client-sys",
+]
+
+[[package]]
+name = "tracy-client-sys"
+version = "0.22.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9d104d610dfa9dd154535102cc9c6164ae1fa37842bc2d9e83f9ac82b0ae0882"
+dependencies = [
+ "cc",
+]
+
+[[package]]
 name = "try-lock"
 version = "0.2.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b"
 
 [[package]]
+name = "trybuild"
+version = "1.0.99"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "207aa50d36c4be8d8c6ea829478be44a372c6a77669937bb39c698e52f1491e8"
+dependencies = [
+ "glob",
+ "serde",
+ "serde_derive",
+ "serde_json",
+ "termcolor",
+ "toml 0.8.19",
+]
+
+[[package]]
 name = "tvix-build"
 version = "0.1.0"
 dependencies = [
  "bytes",
  "clap",
- "itertools 0.12.0",
+ "itertools 0.12.1",
+ "mimalloc",
  "prost",
  "prost-build",
  "rstest",
@@ -4050,8 +4480,8 @@ dependencies = [
  "tonic-build",
  "tonic-reflection",
  "tracing",
- "tracing-subscriber",
  "tvix-castore",
+ "tvix-tracing",
  "url",
 ]
 
@@ -4069,26 +4499,32 @@ dependencies = [
  "bytes",
  "data-encoding",
  "digest",
+ "erased-serde",
  "fastcdc",
  "fuse-backend-rs",
  "futures",
  "hex-literal",
+ "hyper-util",
  "lazy_static",
  "libc",
  "object_store",
- "parking_lot 0.12.2",
+ "parking_lot 0.12.3",
  "petgraph",
  "pin-project-lite",
  "prost",
  "prost-build",
+ "redb",
  "rstest",
  "rstest_reuse",
  "serde",
+ "serde_json",
  "serde_qs",
+ "serde_tagged",
  "serde_with",
  "sled",
  "tempfile",
  "thiserror",
+ "threadpool",
  "tokio",
  "tokio-retry",
  "tokio-stream",
@@ -4099,6 +4535,8 @@ dependencies = [
  "tonic-reflection",
  "tower",
  "tracing",
+ "tracing-indicatif",
+ "tvix-tracing",
  "url",
  "vhost",
  "vhost-user-backend",
@@ -4118,17 +4556,24 @@ dependencies = [
  "bytes",
  "clap",
  "dirs",
+ "expect-test",
+ "mimalloc",
  "nix-compat",
+ "rnix",
+ "rowan",
+ "rustc-hash 2.0.0",
  "rustyline",
+ "smol_str",
  "thiserror",
  "tokio",
  "tracing",
- "tracing-subscriber",
+ "tracing-indicatif",
  "tvix-build",
  "tvix-castore",
  "tvix-eval",
  "tvix-glue",
  "tvix-store",
+ "tvix-tracing",
  "wu-manber",
 ]
 
@@ -4144,11 +4589,12 @@ dependencies = [
  "data-encoding",
  "dirs",
  "genawaiter",
- "imbl",
- "itertools 0.12.0",
+ "itertools 0.12.1",
  "lazy_static",
  "lexical-core",
  "md-5",
+ "mimalloc",
+ "nohash-hasher",
  "os_str_bytes",
  "path-clean",
  "pretty_assertions",
@@ -4157,6 +4603,7 @@ dependencies = [
  "rnix",
  "rowan",
  "rstest",
+ "rustc-hash 2.0.0",
  "serde",
  "serde_json",
  "sha1",
@@ -4165,9 +4612,9 @@ dependencies = [
  "tabwriter",
  "tempfile",
  "test-strategy",
- "toml",
+ "toml 0.6.0",
  "tvix-eval-builtin-macros",
- "xml-rs",
+ "vu128",
 ]
 
 [[package]]
@@ -4187,6 +4634,7 @@ dependencies = [
  "async-compression",
  "bstr",
  "bytes",
+ "clap",
  "criterion",
  "data-encoding",
  "futures",
@@ -4194,6 +4642,7 @@ dependencies = [
  "lazy_static",
  "magic",
  "md-5",
+ "mimalloc",
  "nix 0.27.1",
  "nix-compat",
  "pin-project",
@@ -4208,12 +4657,15 @@ dependencies = [
  "thiserror",
  "tokio",
  "tokio-tar",
+ "tokio-test",
  "tokio-util",
  "tracing",
+ "tracing-indicatif",
  "tvix-build",
  "tvix-castore",
  "tvix-eval",
  "tvix-store",
+ "tvix-tracing",
  "url",
  "walkdir",
  "wu-manber",
@@ -4244,17 +4696,18 @@ dependencies = [
  "count-write",
  "data-encoding",
  "futures",
+ "hyper-util",
  "lazy_static",
  "lru",
+ "mimalloc",
  "nix-compat",
- "opentelemetry",
- "opentelemetry-otlp",
- "opentelemetry_sdk",
- "parking_lot 0.12.2",
+ "parking_lot 0.12.3",
  "pin-project-lite",
  "prost",
  "prost-build",
+ "redb",
  "reqwest",
+ "reqwest-middleware",
  "rstest",
  "rstest_reuse",
  "serde",
@@ -4270,19 +4723,51 @@ dependencies = [
  "tokio-retry",
  "tokio-stream",
  "tokio-util",
+ "toml 0.8.19",
  "tonic",
  "tonic-build",
+ "tonic-health",
  "tonic-reflection",
  "tower",
+ "tower-http",
  "tracing",
- "tracing-opentelemetry",
- "tracing-subscriber",
+ "tracing-indicatif",
  "tvix-castore",
+ "tvix-tracing",
  "url",
  "walkdir",
 ]
 
 [[package]]
+name = "tvix-tracing"
+version = "0.1.0"
+dependencies = [
+ "axum",
+ "http",
+ "indicatif",
+ "lazy_static",
+ "opentelemetry 0.24.0",
+ "opentelemetry-http",
+ "opentelemetry-otlp",
+ "opentelemetry_sdk 0.24.1",
+ "reqwest-tracing",
+ "thiserror",
+ "tokio",
+ "tonic",
+ "tracing",
+ "tracing-indicatif",
+ "tracing-opentelemetry 0.25.0",
+ "tracing-subscriber",
+ "tracing-tracy",
+]
+
+[[package]]
+name = "typeid"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "059d83cc991e7a42fc37bd50941885db0888e34209f8cfd9aab07ddec03bc9cf"
+
+[[package]]
 name = "typenum"
 version = "1.17.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -4309,7 +4794,7 @@ checksum = "ac73887f47b9312552aa90ef477927ff014d63d1920ca8037c6c1951eab64bb1"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.48",
+ "syn 2.0.76",
 ]
 
 [[package]]
@@ -4368,9 +4853,9 @@ checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1"
 
 [[package]]
 name = "url"
-version = "2.5.0"
+version = "2.5.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "31e6302e3bb753d46e83516cae55ae196fc0c309407cf11ab35cc51a4c2a4633"
+checksum = "22784dbdf76fdde8af1aeda5622b546b422b6fc585325248a2bf9f5e41e94d6c"
 dependencies = [
  "form_urlencoded",
  "idna",
@@ -4489,19 +4974,49 @@ dependencies = [
 ]
 
 [[package]]
-name = "wait-timeout"
-version = "0.2.0"
+name = "vt100"
+version = "0.15.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9f200f5b12eb75f8c1ed65abd4b2db8a6e1b138a20de009dacee265a2498f3f6"
+checksum = "84cd863bf0db7e392ba3bd04994be3473491b31e66340672af5d11943c6274de"
 dependencies = [
- "libc",
+ "itoa",
+ "log",
+ "unicode-width",
+ "vte",
 ]
 
 [[package]]
+name = "vte"
+version = "0.11.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f5022b5fbf9407086c180e9557be968742d839e68346af7792b8592489732197"
+dependencies = [
+ "arrayvec",
+ "utf8parse",
+ "vte_generate_state_changes",
+]
+
+[[package]]
+name = "vte_generate_state_changes"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d257817081c7dffcdbab24b9e62d2def62e2ff7d00b1c20062551e6cccc145ff"
+dependencies = [
+ "proc-macro2",
+ "quote",
+]
+
+[[package]]
+name = "vu128"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b18da3bd753c6f4373511e5f025423986560dfe4a5e7d642cc9a0266847f9fdd"
+
+[[package]]
 name = "walkdir"
-version = "2.4.0"
+version = "2.5.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d71d857dc86794ca4c280d616f7da00d2dbfd8cd788846559a6813e6aa4b54ee"
+checksum = "29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b"
 dependencies = [
  "same-file",
  "winapi-util",
@@ -4543,7 +5058,7 @@ dependencies = [
  "once_cell",
  "proc-macro2",
  "quote",
- "syn 2.0.48",
+ "syn 2.0.76",
  "wasm-bindgen-shared",
 ]
 
@@ -4577,7 +5092,7 @@ checksum = "bae1abb6806dc1ad9e560ed242107c0f6c84335f1749dd4e8ddb012ebd5e25a7"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.48",
+ "syn 2.0.76",
  "wasm-bindgen-backend",
  "wasm-bindgen-shared",
 ]
@@ -4590,9 +5105,9 @@ checksum = "4d91413b1c31d7539ba5ef2451af3f0b833a005eb27a631cec32bc0635a8602b"
 
 [[package]]
 name = "wasm-streams"
-version = "0.3.0"
+version = "0.4.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b4609d447824375f43e1ffbc051b50ad8f4b3ae8219680c94452ea05eb240ac7"
+checksum = "b65dc4c90b63b118468cf747d8bf3566c1913ef60be765b5730ead9e0a3ba129"
 dependencies = [
  "futures-util",
  "js-sys",
@@ -4622,31 +5137,6 @@ dependencies = [
 ]
 
 [[package]]
-name = "which"
-version = "4.4.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "87ba24419a2078cd2b0f2ede2691b6c66d8e47836da3b6db8265ebad47afbfc7"
-dependencies = [
- "either",
- "home",
- "once_cell",
- "rustix",
-]
-
-[[package]]
-name = "which"
-version = "5.0.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9bf3ea8596f3a0dd5980b46430f2058dfe2c36a27ccfbb1845d6fbfcd9ba6e14"
-dependencies = [
- "either",
- "home",
- "once_cell",
- "rustix",
- "windows-sys 0.48.0",
-]
-
-[[package]]
 name = "winapi"
 version = "0.3.9"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -4678,12 +5168,71 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
 
 [[package]]
+name = "windows"
+version = "0.54.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9252e5725dbed82865af151df558e754e4a3c2c30818359eb17465f1346a1b49"
+dependencies = [
+ "windows-core 0.54.0",
+ "windows-targets 0.52.6",
+]
+
+[[package]]
 name = "windows-core"
 version = "0.52.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "33ab640c8d7e35bf8ba19b884ba838ceb4fba93a4e8c65a9059d08afcfc683d9"
 dependencies = [
- "windows-targets 0.52.0",
+ "windows-targets 0.52.6",
+]
+
+[[package]]
+name = "windows-core"
+version = "0.54.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "12661b9c89351d684a50a8a643ce5f608e20243b9fb84687800163429f161d65"
+dependencies = [
+ "windows-result 0.1.2",
+ "windows-targets 0.52.6",
+]
+
+[[package]]
+name = "windows-registry"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e400001bb720a623c1c69032f8e3e4cf09984deec740f007dd2b03ec864804b0"
+dependencies = [
+ "windows-result 0.2.0",
+ "windows-strings",
+ "windows-targets 0.52.6",
+]
+
+[[package]]
+name = "windows-result"
+version = "0.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5e383302e8ec8515204254685643de10811af0ed97ea37210dc26fb0032647f8"
+dependencies = [
+ "windows-targets 0.52.6",
+]
+
+[[package]]
+name = "windows-result"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1d1043d8214f791817bab27572aaa8af63732e11bf84aa21a45a78d6c317ae0e"
+dependencies = [
+ "windows-targets 0.52.6",
+]
+
+[[package]]
+name = "windows-strings"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4cd9b125c486025df0eabcb585e62173c6c9eddcec5d117d3b6e8c30e2ee4d10"
+dependencies = [
+ "windows-result 0.2.0",
+ "windows-targets 0.52.6",
 ]
 
 [[package]]
@@ -4701,7 +5250,16 @@ version = "0.52.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d"
 dependencies = [
- "windows-targets 0.52.0",
+ "windows-targets 0.52.6",
+]
+
+[[package]]
+name = "windows-sys"
+version = "0.59.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b"
+dependencies = [
+ "windows-targets 0.52.6",
 ]
 
 [[package]]
@@ -4721,17 +5279,18 @@ dependencies = [
 
 [[package]]
 name = "windows-targets"
-version = "0.52.0"
+version = "0.52.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8a18201040b24831fbb9e4eb208f8892e1f50a37feb53cc7ff887feb8f50e7cd"
+checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973"
 dependencies = [
- "windows_aarch64_gnullvm 0.52.0",
- "windows_aarch64_msvc 0.52.0",
- "windows_i686_gnu 0.52.0",
- "windows_i686_msvc 0.52.0",
- "windows_x86_64_gnu 0.52.0",
- "windows_x86_64_gnullvm 0.52.0",
- "windows_x86_64_msvc 0.52.0",
+ "windows_aarch64_gnullvm 0.52.6",
+ "windows_aarch64_msvc 0.52.6",
+ "windows_i686_gnu 0.52.6",
+ "windows_i686_gnullvm",
+ "windows_i686_msvc 0.52.6",
+ "windows_x86_64_gnu 0.52.6",
+ "windows_x86_64_gnullvm 0.52.6",
+ "windows_x86_64_msvc 0.52.6",
 ]
 
 [[package]]
@@ -4742,9 +5301,9 @@ checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8"
 
 [[package]]
 name = "windows_aarch64_gnullvm"
-version = "0.52.0"
+version = "0.52.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cb7764e35d4db8a7921e09562a0304bf2f93e0a51bfccee0bd0bb0b666b015ea"
+checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3"
 
 [[package]]
 name = "windows_aarch64_msvc"
@@ -4754,9 +5313,9 @@ checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc"
 
 [[package]]
 name = "windows_aarch64_msvc"
-version = "0.52.0"
+version = "0.52.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bbaa0368d4f1d2aaefc55b6fcfee13f41544ddf36801e793edbbfd7d7df075ef"
+checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469"
 
 [[package]]
 name = "windows_i686_gnu"
@@ -4766,9 +5325,15 @@ checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e"
 
 [[package]]
 name = "windows_i686_gnu"
-version = "0.52.0"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b"
+
+[[package]]
+name = "windows_i686_gnullvm"
+version = "0.52.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a28637cb1fa3560a16915793afb20081aba2c92ee8af57b4d5f28e4b3e7df313"
+checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66"
 
 [[package]]
 name = "windows_i686_msvc"
@@ -4778,9 +5343,9 @@ checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406"
 
 [[package]]
 name = "windows_i686_msvc"
-version = "0.52.0"
+version = "0.52.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ffe5e8e31046ce6230cc7215707b816e339ff4d4d67c65dffa206fd0f7aa7b9a"
+checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66"
 
 [[package]]
 name = "windows_x86_64_gnu"
@@ -4790,9 +5355,9 @@ checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e"
 
 [[package]]
 name = "windows_x86_64_gnu"
-version = "0.52.0"
+version = "0.52.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3d6fa32db2bc4a2f5abeacf2b69f7992cd09dca97498da74a151a3132c26befd"
+checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78"
 
 [[package]]
 name = "windows_x86_64_gnullvm"
@@ -4802,9 +5367,9 @@ checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc"
 
 [[package]]
 name = "windows_x86_64_gnullvm"
-version = "0.52.0"
+version = "0.52.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1a657e1e9d3f514745a572a6846d3c7aa7dbe1658c056ed9c3344c4109a6949e"
+checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d"
 
 [[package]]
 name = "windows_x86_64_msvc"
@@ -4814,18 +5379,17 @@ checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538"
 
 [[package]]
 name = "windows_x86_64_msvc"
-version = "0.52.0"
+version = "0.52.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "dff9641d1cd4be8d1a070daf9e3773c5f67e78b4d9d42263020c057706765c04"
+checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec"
 
 [[package]]
-name = "winreg"
-version = "0.50.0"
+name = "winnow"
+version = "0.6.18"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "524e57b2c537c0f9b1e69f1965311ec12182b4122e45035b1508cd24d2adadb1"
+checksum = "68a9bda4691f099d435ad181000724da8e5899daa10713c2d432552b9ccd3a6f"
 dependencies = [
- "cfg-if",
- "windows-sys 0.48.0",
+ "memchr",
 ]
 
 [[package]]
@@ -4845,12 +5409,6 @@ dependencies = [
 ]
 
 [[package]]
-name = "xml-rs"
-version = "0.8.19"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0fcb9cbac069e033553e8bb871be2fbdffcab578eb25bd0f7c508cedc6dcd75a"
-
-[[package]]
 name = "xz2"
 version = "0.1.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -4882,7 +5440,7 @@ checksum = "15e934569e47891f7d9411f1a451d947a60e000ab3bd24fbb970f000387d1b3b"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.48",
+ "syn 2.0.76",
 ]
 
 [[package]]
@@ -4893,27 +5451,27 @@ checksum = "525b4ec142c6b68a2d10f01f7bbf6755599ca3f81ea53b8431b7dd348f5fdb2d"
 
 [[package]]
 name = "zstd"
-version = "0.13.0"
+version = "0.13.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bffb3309596d527cfcba7dfc6ed6052f1d39dfbd7c867aa2e865e4a449c10110"
+checksum = "fcf2b778a664581e31e389454a7072dab1647606d44f7feea22cd5abb9c9f3f9"
 dependencies = [
  "zstd-safe",
 ]
 
 [[package]]
 name = "zstd-safe"
-version = "7.0.0"
+version = "7.2.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "43747c7422e2924c11144d5229878b98180ef8b06cca4ab5af37afc8a8d8ea3e"
+checksum = "54a3ab4db68cea366acc5c897c7b4d4d1b8994a9cd6e6f841f8964566a419059"
 dependencies = [
  "zstd-sys",
 ]
 
 [[package]]
 name = "zstd-sys"
-version = "2.0.9+zstd.1.5.5"
+version = "2.0.13+zstd.1.5.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9e16efa8a874a0481a574084d34cc26fdb3b99627480f785888deb6386506656"
+checksum = "38ff0f21cfee8f97d94cef41359e0c89aa6113028ab0291aa8ca0038995a95aa"
 dependencies = [
  "cc",
  "pkg-config",
diff --git a/tvix/Cargo.nix b/tvix/Cargo.nix
index f6c3108fa422..5671624ceceb 100644
--- a/tvix/Cargo.nix
+++ b/tvix/Cargo.nix
@@ -1,4 +1,4 @@
-# This file was @generated by crate2nix 0.14.0 with the command:
+# This file was @generated by crate2nix 0.14.1 with the command:
 #   "generate" "--all-features"
 # See https://github.com/kolloch/crate2nix for more info.
 
@@ -13,6 +13,8 @@
 , rootFeatures ? [ "default" ]
  # If true, throw errors instead of issuing deprecation warnings.
 , strictDeprecation ? false
+  # Elements to add to the `-C target-feature=` argument passed to `rustc`
+  # (separated by `,`, prefixed with `+`).
   # Used for conditional compilation based on CPU feature detection.
 , targetFeatures ? [ ]
   # Whether to perform release builds: longer compile times, faster binaries.
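A minimal sketch of the new `targetFeatures` argument (assuming the generated file's defaults for all other arguments; `tvix-build` is one of the workspace members defined further below):

  # Build a single workspace member with AVX2 enabled; per the comment
  # above, list elements are comma-joined and `+`-prefixed, yielding
  # `-C target-feature=+avx2`.
  (import ./Cargo.nix {
    targetFeatures = [ "avx2" ];
  }).workspaceMembers."tvix-build".build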
@@ -33,6 +35,16 @@ rec {
   # You can override the features with
   # workspaceMembers."${crateName}".build.override { features = [ "default" "feature1" ... ]; }.
   workspaceMembers = {
+    "nar-bridge" = rec {
+      packageId = "nar-bridge";
+      build = internal.buildRustCrateWithFeatures {
+        packageId = "nar-bridge";
+      };
+
+      # Debug support which might change between releases.
+      # File a bug if you depend on any for non-debug work!
+      debug = internal.debugCrate { inherit packageId; };
+    };
     "nix-compat" = rec {
       packageId = "nix-compat";
       build = internal.buildRustCrateWithFeatures {
@@ -43,6 +55,26 @@ rec {
       # File a bug if you depend on any for non-debug work!
       debug = internal.debugCrate { inherit packageId; };
     };
+    "nix-compat-derive" = rec {
+      packageId = "nix-compat-derive";
+      build = internal.buildRustCrateWithFeatures {
+        packageId = "nix-compat-derive";
+      };
+
+      # Debug support which might change between releases.
+      # File a bug if you depend on any for non-debug work!
+      debug = internal.debugCrate { inherit packageId; };
+    };
+    "nix-compat-derive-tests" = rec {
+      packageId = "nix-compat-derive-tests";
+      build = internal.buildRustCrateWithFeatures {
+        packageId = "nix-compat-derive-tests";
+      };
+
+      # Debug support which might change between releases.
+      # File a bug if you depend on any for non-debug work!
+      debug = internal.debugCrate { inherit packageId; };
+    };
     "tvix-build" = rec {
       packageId = "tvix-build";
       build = internal.buildRustCrateWithFeatures {
@@ -123,6 +155,16 @@ rec {
       # File a bug if you depend on any for non-debug work!
       debug = internal.debugCrate { inherit packageId; };
     };
+    "tvix-tracing" = rec {
+      packageId = "tvix-tracing";
+      build = internal.buildRustCrateWithFeatures {
+        packageId = "tvix-tracing";
+      };
+
+      # Debug support which might change between releases.
+      # File a bug if you depend on any for non-debug work!
+      debug = internal.debugCrate { inherit packageId; };
+    };
   };
 
   # A derivation that joins the outputs of all workspace members together.
@@ -259,13 +301,14 @@ rec {
           "perf-literal" = [ "dep:memchr" ];
           "std" = [ "memchr?/std" ];
         };
-        resolvedDefaultFeatures = [ "default" "perf-literal" "std" ];
+        resolvedDefaultFeatures = [ "perf-literal" "std" ];
       };
       "allocator-api2" = rec {
         crateName = "allocator-api2";
         version = "0.2.18";
         edition = "2018";
         sha256 = "0kr6lfnxvnj164j1x38g97qjlhb7akppqzvgfs0697140ixbav2w";
+        libName = "allocator_api2";
         authors = [
           "Zakarum <zaq.dev@icloud.com>"
         ];
@@ -281,6 +324,7 @@ rec {
         version = "0.1.1";
         edition = "2018";
         sha256 = "1w7ynjxrfs97xg3qlcdns4kgfpwcdv824g611fq32cag4cdr96g9";
+        libName = "android_tzdata";
         authors = [
           "RumovZ"
         ];
@@ -318,9 +362,9 @@ rec {
       };
       "anstream" = rec {
         crateName = "anstream";
-        version = "0.6.11";
+        version = "0.6.15";
         edition = "2021";
-        sha256 = "19dndamalavhjwp4i74k8hdijcixb7gsfa6ycwyc1r8xn6y1wbkf";
+        sha256 = "09nm4qj34kiwgzczdvj14x7hgsb235g4sqsay3xsz7zqn4d5rqb4";
         dependencies = [
           {
             name = "anstyle";
@@ -346,6 +390,10 @@ rec {
             packageId = "colorchoice";
           }
           {
+            name = "is_terminal_polyfill";
+            packageId = "is_terminal_polyfill";
+          }
+          {
             name = "utf8parse";
             packageId = "utf8parse";
           }
@@ -359,9 +407,9 @@ rec {
       };
       "anstyle" = rec {
         crateName = "anstyle";
-        version = "1.0.4";
+        version = "1.0.8";
         edition = "2021";
-        sha256 = "11yxw02b6parn29s757z96rgiqbn8qy0fk9a3p3bhczm85dhfybh";
+        sha256 = "1cfmkza63xpn1kkz844mgjwm9miaiz4jkyczmwxzivcsypk1vv0v";
         features = {
           "default" = [ "std" ];
         };
@@ -369,9 +417,10 @@ rec {
       };
       "anstyle-parse" = rec {
         crateName = "anstyle-parse";
-        version = "0.2.3";
+        version = "0.2.5";
         edition = "2021";
-        sha256 = "134jhzrz89labrdwxxnjxqjdg06qvaflj1wkfnmyapwyldfwcnn7";
+        sha256 = "1jy12rvgbldflnb2x7mcww9dcffw1mx22nyv6p3n7d62h0gdwizb";
+        libName = "anstyle_parse";
         dependencies = [
           {
             name = "utf8parse";
@@ -388,9 +437,10 @@ rec {
       };
       "anstyle-query" = rec {
         crateName = "anstyle-query";
-        version = "1.0.2";
+        version = "1.1.1";
         edition = "2021";
-        sha256 = "0j3na4b1nma39g4x7cwvj009awxckjf3z2vkwhldgka44hqj72g2";
+        sha256 = "0aj22iy4pzk6mz745sfrm1ym14r0y892jhcrbs8nkj7nqx9gqdkd";
+        libName = "anstyle_query";
         dependencies = [
           {
             name = "windows-sys";
@@ -403,9 +453,10 @@ rec {
       };
       "anstyle-wincon" = rec {
         crateName = "anstyle-wincon";
-        version = "3.0.2";
+        version = "3.0.4";
         edition = "2021";
-        sha256 = "19v0fv400bmp4niqpzxnhg83vz12mmqv7l2l8vi80qcdxj0lpm8w";
+        sha256 = "1y2pkvsrdxbcwircahb4wimans2pzmwwxad7ikdhj5lpdqdlxxsv";
+        libName = "anstyle_wincon";
         dependencies = [
           {
             name = "anstyle";
@@ -422,9 +473,9 @@ rec {
       };
       "anyhow" = rec {
         crateName = "anyhow";
-        version = "1.0.79";
+        version = "1.0.86";
         edition = "2018";
-        sha256 = "1ji5irqiwr8yprgqj8zvnli7zd7fz9kzaiddq44jnrl2l289h3h8";
+        sha256 = "1nk301x8qhpdaks6a9zvcp7yakjqnczjmqndbg7vk4494d3d1ldk";
         authors = [
           "David Tolnay <dtolnay@gmail.com>"
         ];
@@ -439,6 +490,7 @@ rec {
         version = "1.6.0";
         edition = "2018";
         sha256 = "19n9j146bpxs9phyh48gmlh9jjsdijr9p9br04qms0g9ypfsvp5x";
+        libName = "arc_swap";
         authors = [
           "Michal 'vorner' Vaner <vorner@vorner.cz>"
         ];
@@ -475,6 +527,7 @@ rec {
         version = "2.2.0";
         edition = "2021";
         sha256 = "1hzhkbrlmgbrrwb1d5aba5f03p42s6z80g5p38s127c27nj470pj";
+        libName = "async_channel";
         authors = [
           "Stjepan Glavina <stjepang@gmail.com>"
         ];
@@ -512,9 +565,10 @@ rec {
       };
       "async-compression" = rec {
         crateName = "async-compression";
-        version = "0.4.9";
+        version = "0.4.12";
         edition = "2018";
-        sha256 = "14r6vbsbbkqjiqy0qwwywjakdi29jfyidhqp389l5r4gm7bsp7jf";
+        sha256 = "1ap403q889b1gghca7vigvydsnf8vr94xz3d488p9i9b9vv39hgy";
+        libName = "async_compression";
         authors = [
           "Wim Looman <wim@nemo157.com>"
           "Allen Bui <fairingrey@gmail.com>"
@@ -604,6 +658,7 @@ rec {
         version = "2.3.2";
         edition = "2021";
         sha256 = "110847w0ycfhklm3i928avd28x7lf9amblr2wjngi8ngk7sv1k6w";
+        libName = "async_io";
         authors = [
           "Stjepan Glavina <stjepang@gmail.com>"
         ];
@@ -668,6 +723,7 @@ rec {
         version = "2.8.0";
         edition = "2018";
         sha256 = "0asq5xdzgp3d5m82y5rg7a0k9q0g95jy6mgc7ivl334x7qlp4wi8";
+        libName = "async_lock";
         authors = [
           "Stjepan Glavina <stjepang@gmail.com>"
         ];
@@ -684,6 +740,7 @@ rec {
         version = "3.3.0";
         edition = "2021";
         sha256 = "0yxflkfw46rad4lv86f59b5z555dlfmg1riz1n8830rgi0qb8d6h";
+        libName = "async_lock";
         authors = [
           "Stjepan Glavina <stjepang@gmail.com>"
         ];
@@ -711,9 +768,10 @@ rec {
       };
       "async-process" = rec {
         crateName = "async-process";
-        version = "2.1.0";
+        version = "2.2.4";
         edition = "2021";
-        sha256 = "1j0cfac9p3kq5dclfzlz6jv5l29kwflh9nvr3ivmdg8ih3v3q7j5";
+        sha256 = "0x3305pq0fzaqmw7q4c93sgabq97zhkr32xig5dkhkcscn4pg858";
+        libName = "async_process";
         authors = [
           "Stjepan Glavina <stjepang@gmail.com>"
         ];
@@ -721,6 +779,11 @@ rec {
           {
             name = "async-channel";
             packageId = "async-channel";
+            target = { target, features }: ("linux" == target."os" or null);
+          }
+          {
+            name = "async-channel";
+            packageId = "async-channel";
             target = { target, features }: (target."windows" or false);
           }
           {
@@ -738,6 +801,11 @@ rec {
             target = { target, features }: (target."unix" or false);
           }
           {
+            name = "async-task";
+            packageId = "async-task";
+            target = { target, features }: ("linux" == target."os" or null);
+          }
+          {
             name = "blocking";
             packageId = "blocking";
             target = { target, features }: (target."windows" or false);
@@ -758,12 +826,24 @@ rec {
             name = "rustix";
             packageId = "rustix";
             usesDefaultFeatures = false;
+            target = { target, features }: ((target."unix" or false) && (!("linux" == target."os" or null)));
+            features = [ "std" "fs" "process" ];
+          }
+          {
+            name = "rustix";
+            packageId = "rustix";
+            usesDefaultFeatures = false;
             target = { target, features }: (target."unix" or false);
             features = [ "std" "fs" ];
           }
           {
+            name = "tracing";
+            packageId = "tracing";
+            usesDefaultFeatures = false;
+          }
+          {
             name = "windows-sys";
-            packageId = "windows-sys 0.52.0";
+            packageId = "windows-sys 0.59.0";
             usesDefaultFeatures = false;
             target = { target, features }: (target."windows" or false);
             features = [ "Win32_Foundation" "Win32_System_Threading" ];
@@ -782,6 +862,7 @@ rec {
         version = "0.2.5";
         edition = "2018";
         sha256 = "1i9466hiqghhmljjnn83a8vnxi8z013xga03f59c89d2cl7xjiwy";
+        libName = "async_signal";
         authors = [
           "John Nunley <dev@notgull.net>"
         ];
@@ -852,6 +933,7 @@ rec {
         version = "0.3.5";
         edition = "2018";
         sha256 = "0l8sjq1rylkb1ak0pdyjn83b3k6x36j22myngl4sqqgg7whdsmnd";
+        libName = "async_stream";
         authors = [
           "Carl Lerche <me@carllerche.com>"
         ];
@@ -877,6 +959,7 @@ rec {
         edition = "2018";
         sha256 = "14q179j4y8p2z1d0ic6aqgy9fhwz8p9cai1ia8kpw4bw7q12mrhn";
         procMacro = true;
+        libName = "async_stream_impl";
         authors = [
           "Carl Lerche <me@carllerche.com>"
         ];
@@ -891,7 +974,7 @@ rec {
           }
           {
             name = "syn";
-            packageId = "syn 2.0.48";
+            packageId = "syn 2.0.76";
             features = [ "full" "visit-mut" ];
           }
         ];
@@ -902,6 +985,7 @@ rec {
         version = "4.7.0";
         edition = "2018";
         sha256 = "16975vx6aqy5yf16fs9xz5vx1zq8mwkzfmykvcilc1j7b6c6xczv";
+        libName = "async_task";
         authors = [
           "Stjepan Glavina <stjepang@gmail.com>"
         ];
@@ -916,6 +1000,7 @@ rec {
         version = "0.4.0";
         edition = "2021";
         sha256 = "16zx4qcwzq94n13pp6xwa4589apm5y8j20jb7lk4yzn42fqlnzdk";
+        libName = "async_tempfile";
         authors = [
           "Markus Mayer"
         ];
@@ -951,6 +1036,7 @@ rec {
         edition = "2021";
         sha256 = "1adf1jh2yg39rkpmqjqyr9xyd6849p0d95425i6imgbhx0syx069";
         procMacro = true;
+        libName = "async_trait";
         authors = [
           "David Tolnay <dtolnay@gmail.com>"
         ];
@@ -965,7 +1051,7 @@ rec {
           }
           {
             name = "syn";
-            packageId = "syn 2.0.48";
+            packageId = "syn 2.0.76";
             features = [ "full" "visit-mut" ];
           }
         ];
@@ -976,6 +1062,7 @@ rec {
         version = "1.1.2";
         edition = "2018";
         sha256 = "1h5av1lw56m0jf0fd3bchxq8a30xv0b4wv8s4zkp4s0i7mfvs18m";
+        libName = "atomic_waker";
         authors = [
           "Stjepan Glavina <stjepang@gmail.com>"
           "Contributors to futures-rs"
@@ -996,9 +1083,9 @@ rec {
       };
       "axum" = rec {
         crateName = "axum";
-        version = "0.6.20";
+        version = "0.7.5";
         edition = "2021";
-        sha256 = "1gynqkg3dcy1zd7il69h8a3zax86v6qq5zpawqyn87mr6979x0iv";
+        sha256 = "1kyb7pzgn60crl9wyq7dhciv40sxdr1mbqx2r4s7g9j253qrlv1s";
         dependencies = [
           {
             name = "async-trait";
@@ -1009,10 +1096,6 @@ rec {
             packageId = "axum-core";
           }
           {
-            name = "bitflags";
-            packageId = "bitflags 1.3.2";
-          }
-          {
             name = "bytes";
             packageId = "bytes";
           }
@@ -1031,9 +1114,19 @@ rec {
             packageId = "http-body";
           }
           {
+            name = "http-body-util";
+            packageId = "http-body-util";
+          }
+          {
             name = "hyper";
             packageId = "hyper";
-            features = [ "stream" ];
+            optional = true;
+          }
+          {
+            name = "hyper-util";
+            packageId = "hyper-util";
+            optional = true;
+            features = [ "tokio" "server" ];
           }
           {
             name = "itoa";
@@ -1041,7 +1134,7 @@ rec {
           }
           {
             name = "matchit";
-            packageId = "matchit";
+            packageId = "matchit 0.7.3";
           }
           {
             name = "memchr";
@@ -1064,8 +1157,31 @@ rec {
             packageId = "serde";
           }
           {
+            name = "serde_json";
+            packageId = "serde_json";
+            optional = true;
+            features = [ "raw_value" ];
+          }
+          {
+            name = "serde_path_to_error";
+            packageId = "serde_path_to_error";
+            optional = true;
+          }
+          {
+            name = "serde_urlencoded";
+            packageId = "serde_urlencoded";
+            optional = true;
+          }
+          {
             name = "sync_wrapper";
-            packageId = "sync_wrapper";
+            packageId = "sync_wrapper 1.0.1";
+          }
+          {
+            name = "tokio";
+            packageId = "tokio";
+            rename = "tokio";
+            optional = true;
+            features = [ "time" ];
           }
           {
             name = "tower";
@@ -1081,6 +1197,12 @@ rec {
             name = "tower-service";
             packageId = "tower-service";
           }
+          {
+            name = "tracing";
+            packageId = "tracing";
+            optional = true;
+            usesDefaultFeatures = false;
+          }
         ];
         buildDependencies = [
           {
@@ -1099,34 +1221,49 @@ rec {
             features = [ "derive" ];
           }
           {
+            name = "serde_json";
+            packageId = "serde_json";
+          }
+          {
+            name = "tokio";
+            packageId = "tokio";
+            rename = "tokio";
+            features = [ "macros" "rt" "rt-multi-thread" "net" "test-util" ];
+          }
+          {
             name = "tower";
             packageId = "tower";
             rename = "tower";
             features = [ "util" "timeout" "limit" "load-shed" "steer" "filter" ];
           }
+          {
+            name = "tracing";
+            packageId = "tracing";
+          }
         ];
         features = {
           "__private_docs" = [ "tower/full" "dep:tower-http" ];
-          "default" = [ "form" "http1" "json" "matched-path" "original-uri" "query" "tokio" "tower-log" ];
+          "default" = [ "form" "http1" "json" "matched-path" "original-uri" "query" "tokio" "tower-log" "tracing" ];
           "form" = [ "dep:serde_urlencoded" ];
-          "headers" = [ "dep:headers" ];
-          "http1" = [ "hyper/http1" ];
-          "http2" = [ "hyper/http2" ];
+          "http1" = [ "dep:hyper" "hyper?/http1" "hyper-util?/http1" ];
+          "http2" = [ "dep:hyper" "hyper?/http2" "hyper-util?/http2" ];
           "json" = [ "dep:serde_json" "dep:serde_path_to_error" ];
           "macros" = [ "dep:axum-macros" ];
           "multipart" = [ "dep:multer" ];
           "query" = [ "dep:serde_urlencoded" ];
-          "tokio" = [ "dep:tokio" "hyper/server" "hyper/tcp" "hyper/runtime" "tower/make" ];
+          "tokio" = [ "dep:hyper-util" "dep:tokio" "tokio/net" "tokio/rt" "tower/make" "tokio/macros" ];
           "tower-log" = [ "tower/log" ];
           "tracing" = [ "dep:tracing" "axum-core/tracing" ];
-          "ws" = [ "tokio" "dep:tokio-tungstenite" "dep:sha1" "dep:base64" ];
+          "ws" = [ "dep:hyper" "tokio" "dep:tokio-tungstenite" "dep:sha1" "dep:base64" ];
         };
+        resolvedDefaultFeatures = [ "default" "form" "http1" "http2" "json" "matched-path" "original-uri" "query" "tokio" "tower-log" "tracing" ];
       };
       "axum-core" = rec {
         crateName = "axum-core";
-        version = "0.3.4";
+        version = "0.4.3";
         edition = "2021";
-        sha256 = "0b1d9nkqb8znaba4qqzxzc968qwj4ybn4vgpyz9lz4a7l9vsb7vm";
+        sha256 = "1qx28wg4j6qdcdrisqwyaavlzc0zvbsrcwa99zf9456lfbyn6p51";
+        libName = "axum_core";
         dependencies = [
           {
             name = "async-trait";
@@ -1151,10 +1288,22 @@ rec {
             packageId = "http-body";
           }
           {
+            name = "http-body-util";
+            packageId = "http-body-util";
+          }
+          {
             name = "mime";
             packageId = "mime";
           }
           {
+            name = "pin-project-lite";
+            packageId = "pin-project-lite";
+          }
+          {
+            name = "sync_wrapper";
+            packageId = "sync_wrapper 0.1.2";
+          }
+          {
             name = "tower-layer";
             packageId = "tower-layer";
           }
@@ -1162,6 +1311,12 @@ rec {
             name = "tower-service";
             packageId = "tower-service";
           }
+          {
+            name = "tracing";
+            packageId = "tracing";
+            optional = true;
+            usesDefaultFeatures = false;
+          }
         ];
         buildDependencies = [
           {
@@ -1181,6 +1336,7 @@ rec {
           "__private_docs" = [ "dep:tower-http" ];
           "tracing" = [ "dep:tracing" ];
         };
+        resolvedDefaultFeatures = [ "tracing" ];
       };
       "backtrace" = rec {
         crateName = "backtrace";
@@ -1243,7 +1399,7 @@ rec {
         };
         resolvedDefaultFeatures = [ "default" "std" ];
       };
-      "base64" = rec {
+      "base64 0.21.7" = rec {
         crateName = "base64";
         version = "0.21.7";
         edition = "2018";
@@ -1256,6 +1412,20 @@ rec {
           "default" = [ "std" ];
           "std" = [ "alloc" ];
         };
+        resolvedDefaultFeatures = [ "alloc" "std" ];
+      };
+      "base64 0.22.1" = rec {
+        crateName = "base64";
+        version = "0.22.1";
+        edition = "2018";
+        sha256 = "1imqzgh7bxcikp5vx3shqvw9j09g9ly0xr0jma0q66i52r7jbcvj";
+        authors = [
+          "Marshall Pierce <marshall@mpierce.org>"
+        ];
+        features = {
+          "default" = [ "std" ];
+          "std" = [ "alloc" ];
+        };
         resolvedDefaultFeatures = [ "alloc" "default" "std" ];
       };
       "base64ct" = rec {
@@ -1273,13 +1443,13 @@ rec {
       };
       "bigtable_rs" = rec {
         crateName = "bigtable_rs";
-        version = "0.2.9";
+        version = "0.2.10";
         edition = "2021";
         workspace_member = null;
         src = pkgs.fetchgit {
-          url = "https://github.com/flokli/bigtable_rs";
-          rev = "0af404741dfc40eb9fa99cf4d4140a09c5c20df7";
-          sha256 = "1njjam1lx2xlnm7a41lga8601vmjgqz0fvc77x24gd04pc7avxll";
+          url = "https://github.com/liufuyang/bigtable_rs";
+          rev = "1818355a5373a5bc2c84287e3a4e3807154ac8ef";
+          sha256 = "0mn6iw1z7gdxbarsqiwscbdr25nplwlvzs0rs51vgnnjfsnbgl6q";
         };
         authors = [
           "Fuyang Liu <liufuyang@users.noreply.github.com>"
@@ -1294,6 +1464,11 @@ rec {
             packageId = "http";
           }
           {
+            name = "hyper-util";
+            packageId = "hyper-util";
+            features = [ "tokio" ];
+          }
+          {
             name = "log";
             packageId = "log";
           }
@@ -1348,8 +1523,8 @@ rec {
             packageId = "prost-build";
           }
           {
-            name = "prost-wkt-build";
-            packageId = "prost-wkt-build";
+            name = "prost-wkt-types";
+            packageId = "prost-wkt-types";
           }
           {
             name = "tonic-build";
@@ -1359,43 +1534,6 @@ rec {
         ];
 
       };
-      "bit-set" = rec {
-        crateName = "bit-set";
-        version = "0.5.3";
-        edition = "2015";
-        sha256 = "1wcm9vxi00ma4rcxkl3pzzjli6ihrpn9cfdi0c5b4cvga2mxs007";
-        authors = [
-          "Alexis Beingessner <a.beingessner@gmail.com>"
-        ];
-        dependencies = [
-          {
-            name = "bit-vec";
-            packageId = "bit-vec";
-            usesDefaultFeatures = false;
-          }
-        ];
-        features = {
-          "default" = [ "std" ];
-          "std" = [ "bit-vec/std" ];
-        };
-        resolvedDefaultFeatures = [ "default" "std" ];
-      };
-      "bit-vec" = rec {
-        crateName = "bit-vec";
-        version = "0.6.3";
-        edition = "2015";
-        sha256 = "1ywqjnv60cdh1slhz67psnp422md6jdliji6alq0gmly2xm9p7rl";
-        authors = [
-          "Alexis Beingessner <a.beingessner@gmail.com>"
-        ];
-        features = {
-          "default" = [ "std" ];
-          "serde" = [ "dep:serde" ];
-          "serde_no_std" = [ "serde/alloc" ];
-          "serde_std" = [ "std" "serde/std" ];
-        };
-        resolvedDefaultFeatures = [ "default" "std" ];
-      };
       "bitflags 1.3.2" = rec {
         crateName = "bitflags";
         version = "1.3.2";
@@ -1411,11 +1549,11 @@ rec {
         };
         resolvedDefaultFeatures = [ "default" ];
       };
-      "bitflags 2.4.2" = rec {
+      "bitflags 2.6.0" = rec {
         crateName = "bitflags";
-        version = "2.4.2";
+        version = "2.6.0";
         edition = "2021";
-        sha256 = "1pqd142hyqlzr7p9djxq2ff0jx07a2sb2xp9lhw69cbf80s0jmzd";
+        sha256 = "1pkidwzn3hnxlsl8zizh0bncgbjnw7c41cx7bby26ncbzmiznj5h";
         authors = [
           "The Rust Project Developers"
         ];
@@ -1429,24 +1567,11 @@ rec {
         };
         resolvedDefaultFeatures = [ "std" ];
       };
-      "bitmaps" = rec {
-        crateName = "bitmaps";
-        version = "3.2.0";
-        edition = "2021";
-        sha256 = "00ql08pm4l9hizkldyy54v0pk96g7zg8x6i72c2vkcq0iawl4dkh";
-        authors = [
-          "Bodil Stokke <bodil@bodil.org>"
-        ];
-        features = {
-          "default" = [ "std" ];
-        };
-        resolvedDefaultFeatures = [ "default" "std" ];
-      };
       "blake3" = rec {
         crateName = "blake3";
-        version = "1.5.0";
+        version = "1.5.4";
         edition = "2021";
-        sha256 = "11ysh12zcqq6xkjxh5cbrmnwzalprm3z552i5ff7wm5za9hz0c82";
+        sha256 = "1xy6gp8yfcpvzwrhbx5iksxxwf6hsix403klizgr1s6qgwj3686q";
         authors = [
           "Jack O'Connor <oconnor663@gmail.com>"
           "Samuel Neves"
@@ -1476,8 +1601,8 @@ rec {
             features = [ "mac" ];
           }
           {
-            name = "rayon";
-            packageId = "rayon";
+            name = "rayon-core";
+            packageId = "rayon-core";
             optional = true;
           }
         ];
@@ -1489,20 +1614,20 @@ rec {
         ];
         features = {
           "default" = [ "std" ];
-          "digest" = [ "dep:digest" ];
           "mmap" = [ "std" "dep:memmap2" ];
-          "rayon" = [ "dep:rayon" "std" ];
+          "rayon" = [ "dep:rayon-core" "std" ];
           "serde" = [ "dep:serde" ];
-          "traits-preview" = [ "digest" ];
+          "traits-preview" = [ "dep:digest" ];
           "zeroize" = [ "dep:zeroize" "arrayvec/zeroize" ];
         };
-        resolvedDefaultFeatures = [ "default" "digest" "rayon" "std" "traits-preview" ];
+        resolvedDefaultFeatures = [ "default" "rayon" "std" "traits-preview" ];
       };
       "block-buffer" = rec {
         crateName = "block-buffer";
         version = "0.10.4";
         edition = "2018";
         sha256 = "0w9sa2ypmrsqqvc20nhwr75wbb5cjr4kkyhpjm1z1lv2kdicfy1h";
+        libName = "block_buffer";
         authors = [
           "RustCrypto Developers"
         ];
@@ -1571,9 +1696,9 @@ rec {
       };
       "bstr" = rec {
         crateName = "bstr";
-        version = "1.9.0";
+        version = "1.10.0";
         edition = "2021";
-        sha256 = "1p6hzf3wqwwynv6w4pn17jg21amfafph9kb5sfvf1idlli8h13y4";
+        sha256 = "036wwrchd5gq3q4k6w1j2bfl2bk2ff8c0dsa9y7w7aw7nf7knwj0";
         authors = [
           "Andrew Gallant <jamslam@gmail.com>"
         ];
@@ -1585,7 +1710,7 @@ rec {
           }
           {
             name = "regex-automata";
-            packageId = "regex-automata 0.4.3";
+            packageId = "regex-automata 0.4.7";
             optional = true;
             usesDefaultFeatures = false;
             features = [ "dfa-search" ];
@@ -1634,9 +1759,9 @@ rec {
       };
       "bytes" = rec {
         crateName = "bytes";
-        version = "1.5.0";
+        version = "1.7.1";
         edition = "2018";
-        sha256 = "08w2i8ac912l8vlvkv3q51cd4gr09pwlg3sjsjffcizlrb0i5gd2";
+        sha256 = "0l5sf69avjxcw41cznyzxsnymwmkpmk08q0sm7fgicvvn0ysa643";
         authors = [
           "Carl Lerche <me@carllerche.com>"
           "Sean McArthur <sean@seanmonstar.com>"
@@ -1736,10 +1861,9 @@ rec {
       };
       "cc" = rec {
         crateName = "cc";
-        version = "1.0.83";
+        version = "1.1.15";
         edition = "2018";
-        crateBin = [ ];
-        sha256 = "1l643zidlb5iy1dskc5ggqs4wqa29a02f44piczqc8zcnsq4y5zi";
+        sha256 = "1rn62w58ba1ylqlp3saj4n0vh1h40ii1r83xr06p80r9m9ss5djp";
         authors = [
           "Alex Crichton <alex@alexcrichton.com>"
         ];
@@ -1748,25 +1872,31 @@ rec {
             name = "jobserver";
             packageId = "jobserver";
             optional = true;
+            usesDefaultFeatures = false;
           }
           {
             name = "libc";
             packageId = "libc";
+            optional = true;
             usesDefaultFeatures = false;
             target = { target, features }: (target."unix" or false);
           }
+          {
+            name = "shlex";
+            packageId = "shlex";
+          }
         ];
         features = {
-          "jobserver" = [ "dep:jobserver" ];
-          "parallel" = [ "jobserver" ];
+          "parallel" = [ "dep:libc" "dep:jobserver" ];
         };
-        resolvedDefaultFeatures = [ "jobserver" "parallel" ];
+        resolvedDefaultFeatures = [ "parallel" ];
       };
       "cfg-if" = rec {
         crateName = "cfg-if";
         version = "1.0.0";
         edition = "2018";
         sha256 = "1za0vb97n4brpzpv8lsbnzmq5r8f2b0cpqqr0sy8h5bn751xxwds";
+        libName = "cfg_if";
         authors = [
           "Alex Crichton <alex@alexcrichton.com>"
         ];
@@ -1820,7 +1950,7 @@ rec {
           }
           {
             name = "windows-targets";
-            packageId = "windows-targets 0.52.0";
+            packageId = "windows-targets 0.52.6";
             optional = true;
             target = { target, features }: (target."windows" or false);
           }
@@ -1886,6 +2016,7 @@ rec {
         version = "0.2.1";
         edition = "2021";
         sha256 = "0mi6ci27lpz3azksxrvgzl9jc4a3dfr20pjx7y2nkcrjalbikyfd";
+        libName = "ciborium_io";
         authors = [
           "Nathaniel McCallum <npmccallum@profian.com>"
         ];
@@ -1899,6 +2030,7 @@ rec {
         version = "0.2.1";
         edition = "2021";
         sha256 = "0az2vabamfk75m74ylgf6nzqgqgma5yf25bc1ripfg09ri7a5yny";
+        libName = "ciborium_ll";
         authors = [
           "Nathaniel McCallum <npmccallum@profian.com>"
         ];
@@ -1918,10 +2050,10 @@ rec {
       };
       "clap" = rec {
         crateName = "clap";
-        version = "4.4.18";
+        version = "4.5.16";
         edition = "2021";
         crateBin = [ ];
-        sha256 = "0p46h346y8nval6gwzh27if3icbi9dwl95fg5ir36ihrqip8smqy";
+        sha256 = "068hjwbrndn4iz4fsc6d52q4ymg1kfsymjnqbxzdil23zbzijrzd";
         dependencies = [
           {
             name = "clap_builder";
@@ -1949,6 +2081,7 @@ rec {
           "suggestions" = [ "clap_builder/suggestions" ];
           "unicode" = [ "clap_builder/unicode" ];
           "unstable-doc" = [ "clap_builder/unstable-doc" "derive" ];
+          "unstable-ext" = [ "clap_builder/unstable-ext" ];
           "unstable-styles" = [ "clap_builder/unstable-styles" ];
           "unstable-v5" = [ "clap_builder/unstable-v5" "clap_derive?/unstable-v5" "deprecated" ];
           "usage" = [ "clap_builder/usage" ];
@@ -1958,9 +2091,9 @@ rec {
       };
       "clap_builder" = rec {
         crateName = "clap_builder";
-        version = "4.4.18";
+        version = "4.5.15";
         edition = "2021";
-        sha256 = "1iyif47075caa4x1p3ygk18b07lb4xl4k48w4c061i2hxi0dzx2d";
+        sha256 = "1dmas5z20yqmlmfhykr38pn1hkcnr4jzxjw4cs2f6lkn2wmyqsi1";
         dependencies = [
           {
             name = "anstream";
@@ -1977,7 +2110,7 @@ rec {
           }
           {
             name = "strsim";
-            packageId = "strsim";
+            packageId = "strsim 0.11.1";
             optional = true;
           }
         ];
@@ -1988,7 +2121,7 @@ rec {
           "std" = [ "anstyle/std" ];
           "suggestions" = [ "dep:strsim" "error-context" ];
           "unicode" = [ "dep:unicode-width" "dep:unicase" ];
-          "unstable-doc" = [ "cargo" "wrap_help" "env" "unicode" "string" ];
+          "unstable-doc" = [ "cargo" "wrap_help" "env" "unicode" "string" "unstable-ext" ];
           "unstable-styles" = [ "color" ];
           "unstable-v5" = [ "deprecated" ];
           "wrap_help" = [ "help" "dep:terminal_size" ];
@@ -1997,14 +2130,14 @@ rec {
       };
       "clap_derive" = rec {
         crateName = "clap_derive";
-        version = "4.4.7";
+        version = "4.5.13";
         edition = "2021";
-        sha256 = "0hk4hcxl56qwqsf4hmf7c0gr19r9fbxk0ah2bgkr36pmmaph966g";
+        sha256 = "1860xq3rbgwsqwcj9rd14cky9iiywwx86j7fvvngdjixbyfka7ah";
         procMacro = true;
         dependencies = [
           {
             name = "heck";
-            packageId = "heck";
+            packageId = "heck 0.5.0";
           }
           {
             name = "proc-macro2";
@@ -2016,7 +2149,7 @@ rec {
           }
           {
             name = "syn";
-            packageId = "syn 2.0.48";
+            packageId = "syn 2.0.76";
             features = [ "full" ];
           }
         ];
@@ -2028,9 +2161,9 @@ rec {
       };
       "clap_lex" = rec {
         crateName = "clap_lex";
-        version = "0.6.0";
+        version = "0.7.2";
         edition = "2021";
-        sha256 = "1l8bragdvim7mva9flvd159dskn2bdkpl0jqrr41wnjfn8pcfbvh";
+        sha256 = "15zcrc2fa6ycdzaihxghf48180bnvzsivhf0fmah24bnnaf76qhl";
 
       };
       "clipboard-win" = rec {
@@ -2038,6 +2171,7 @@ rec {
         version = "4.5.0";
         edition = "2018";
         sha256 = "0qh3rypkf1lazniq4nr04hxsck0d55rigb5sjvpvgnap4dyc54bi";
+        libName = "clipboard_win";
         authors = [
           "Douman <douman@gmx.se>"
         ];
@@ -2079,6 +2213,7 @@ rec {
         version = "0.1.2";
         edition = "2015";
         sha256 = "08l1b84bn8r8a72rbvyi2v8a5i0j0kk0a5gr7fb6lmjvw05pf86c";
+        libName = "codemap_diagnostic";
         authors = [
           "Kevin Mehall <km@kevinmehall.net>"
           "The Rust Project Developers"
@@ -2097,9 +2232,9 @@ rec {
       };
       "colorchoice" = rec {
         crateName = "colorchoice";
-        version = "1.0.0";
+        version = "1.0.2";
         edition = "2021";
-        sha256 = "1ix7w85kwvyybwi2jdkl3yva2r2bvdcc3ka2grjfzfgrapqimgxc";
+        sha256 = "1h18ph538y8yjmbpaf8li98l0ifms2xmh3rax9666c5qfjfi3zfk";
 
       };
       "concurrent-queue" = rec {
@@ -2107,6 +2242,7 @@ rec {
         version = "2.4.0";
         edition = "2018";
         sha256 = "0qvk23ynj311adb4z7v89wk3bs65blps4n24q8rgl23vjk6lhq6i";
+        libName = "concurrent_queue";
         authors = [
           "Stjepan Glavina <stjepang@gmail.com>"
           "Taiki Endo <te316e89@gmail.com>"
@@ -2126,11 +2262,53 @@ rec {
         };
         resolvedDefaultFeatures = [ "default" "std" ];
       };
+      "console" = rec {
+        crateName = "console";
+        version = "0.15.8";
+        edition = "2018";
+        sha256 = "1sz4nl9nz8pkmapqni6py7jxzi7nzqjxzb3ya4kxvmkb0zy867qf";
+        authors = [
+          "Armin Ronacher <armin.ronacher@active-4.com>"
+        ];
+        dependencies = [
+          {
+            name = "encode_unicode";
+            packageId = "encode_unicode";
+            target = { target, features }: (target."windows" or false);
+          }
+          {
+            name = "lazy_static";
+            packageId = "lazy_static";
+          }
+          {
+            name = "libc";
+            packageId = "libc";
+          }
+          {
+            name = "unicode-width";
+            packageId = "unicode-width";
+            optional = true;
+          }
+          {
+            name = "windows-sys";
+            packageId = "windows-sys 0.52.0";
+            target = { target, features }: (target."windows" or false);
+            features = [ "Win32_Foundation" "Win32_System_Console" "Win32_Storage_FileSystem" "Win32_UI_Input_KeyboardAndMouse" ];
+          }
+        ];
+        features = {
+          "default" = [ "unicode-width" "ansi-parsing" ];
+          "unicode-width" = [ "dep:unicode-width" ];
+          "windows-console-colors" = [ "ansi-parsing" ];
+        };
+        resolvedDefaultFeatures = [ "ansi-parsing" "unicode-width" ];
+      };
       "const-oid" = rec {
         crateName = "const-oid";
         version = "0.9.6";
         edition = "2021";
         sha256 = "1y0jnqaq7p2wvspnx7qj76m7hjcqpz73qzvr9l2p9n2s51vr6if2";
+        libName = "const_oid";
         authors = [
           "RustCrypto Developers"
         ];
@@ -2153,6 +2331,7 @@ rec {
         version = "0.9.4";
         edition = "2018";
         sha256 = "13zvbbj07yk3b61b8fhwfzhy35535a583irf23vlcg59j7h9bqci";
+        libName = "core_foundation";
         authors = [
           "The Servo Project Developers"
         ];
@@ -2184,6 +2363,7 @@ rec {
         version = "0.8.6";
         edition = "2018";
         sha256 = "13w6sdf06r0hn7bx2b45zxsg1mm2phz34jikm6xc5qrbr6djpsh6";
+        libName = "core_foundation_sys";
         authors = [
           "The Servo Project Developers"
         ];
@@ -2197,6 +2377,7 @@ rec {
         version = "0.1.0";
         edition = "2018";
         sha256 = "11bswmgr81s3jagdci1pr6qh9vnz9zsbbf2dqpi260daa2mhgmff";
+        libName = "count_write";
         authors = [
           "SOFe <sofe2038@gmail.com>"
         ];
@@ -2235,7 +2416,7 @@ rec {
           {
             name = "libc";
             packageId = "libc";
-            target = { target, features }: (pkgs.rust.lib.toRustTarget stdenv.hostPlatform == "aarch64-linux-android");
+            target = { target, features }: (stdenv.hostPlatform.rust.rustcTarget == "aarch64-linux-android");
           }
           {
             name = "libc";
@@ -2392,6 +2573,7 @@ rec {
         version = "0.5.0";
         edition = "2018";
         sha256 = "1c866xkjqqhzg4cjvg01f8w6xc1j3j7s58rdksl52skq89iq4l3b";
+        libName = "criterion_plot";
         authors = [
           "Jorge Aparicio <japaricious@gmail.com>"
           "Brook Heisler <brookheisler@gmail.com>"
@@ -2413,6 +2595,7 @@ rec {
         version = "0.5.11";
         edition = "2021";
         sha256 = "16v48qdflpw3hgdik70bhsj7hympna79q7ci47rw0mlgnxsw2v8p";
+        libName = "crossbeam_channel";
         dependencies = [
           {
             name = "crossbeam-utils";
@@ -2431,6 +2614,7 @@ rec {
         version = "0.8.5";
         edition = "2021";
         sha256 = "03bp38ljx4wj6vvy4fbhx41q8f585zyqix6pncz1mkz93z08qgv1";
+        libName = "crossbeam_deque";
         dependencies = [
           {
             name = "crossbeam-epoch";
@@ -2454,6 +2638,7 @@ rec {
         version = "0.9.18";
         edition = "2021";
         sha256 = "03j2np8llwf376m3fxqx859mgp9f83hj1w34153c7a9c7i5ar0jv";
+        libName = "crossbeam_epoch";
         dependencies = [
           {
             name = "crossbeam-utils";
@@ -2475,6 +2660,7 @@ rec {
         version = "0.8.19";
         edition = "2021";
         sha256 = "0iakrb1b8fjqrag7wphl94d10irhbh2fw1g444xslsywqyn3p3i4";
+        libName = "crossbeam_utils";
         features = {
           "default" = [ "std" ];
           "loom" = [ "dep:loom" ];
@@ -2486,6 +2672,7 @@ rec {
         version = "0.1.6";
         edition = "2018";
         sha256 = "1cvby95a6xg7kxdz5ln3rl9xh66nz66w46mm3g56ri1z5x815yqv";
+        libName = "crypto_common";
         authors = [
           "RustCrypto Developers"
         ];
@@ -2511,6 +2698,7 @@ rec {
         version = "4.1.1";
         edition = "2021";
         sha256 = "0p7ns5917k6369gajrsbfj24llc5zfm635yh3abla7sb5rm8r6z8";
+        libName = "curve25519_dalek";
         authors = [
           "Isis Lovecruft <isis@patternsinthevoid.net>"
           "Henry de Valence <hdevalence@hdevalence.ca>"
@@ -2583,6 +2771,7 @@ rec {
         edition = "2021";
         sha256 = "1cry71xxrr0mcy5my3fb502cwfxy6822k4pm19cwrilrg7hq4s7l";
         procMacro = true;
+        libName = "curve25519_dalek_derive";
         dependencies = [
           {
             name = "proc-macro2";
@@ -2594,7 +2783,7 @@ rec {
           }
           {
             name = "syn";
-            packageId = "syn 2.0.48";
+            packageId = "syn 2.0.76";
             features = [ "full" ];
           }
         ];
@@ -2652,12 +2841,12 @@ rec {
           }
           {
             name = "strsim";
-            packageId = "strsim";
+            packageId = "strsim 0.10.0";
             optional = true;
           }
           {
             name = "syn";
-            packageId = "syn 2.0.48";
+            packageId = "syn 2.0.76";
             features = [ "full" "extra-traits" ];
           }
         ];
@@ -2687,16 +2876,17 @@ rec {
           }
           {
             name = "syn";
-            packageId = "syn 2.0.48";
+            packageId = "syn 2.0.76";
           }
         ];
 
       };
       "data-encoding" = rec {
         crateName = "data-encoding";
-        version = "2.5.0";
+        version = "2.6.0";
         edition = "2018";
-        sha256 = "1rcbnwfmfxhlshzbn3r7srm3azqha3mn33yxyqxkzz2wpqcjm5ky";
+        sha256 = "1qnn68n4vragxaxlkqcb1r28d3hhj43wch67lm4rpxlw89wnjmp8";
+        libName = "data_encoding";
         authors = [
           "Julien Cretin <git@ia0.eu>"
         ];
@@ -2845,6 +3035,7 @@ rec {
         version = "2.0.0";
         edition = "2018";
         sha256 = "1q9kr151h9681wwp6is18750ssghz6j9j7qm7qi1ngcwy7mzi35r";
+        libName = "dirs_next";
         authors = [
           "The @xdg-rs members"
         ];
@@ -2865,6 +3056,7 @@ rec {
         version = "0.3.7";
         edition = "2015";
         sha256 = "19md1cnkazham8a6kh22v12d8hh3raqahfk6yb043vrjr68is78v";
+        libName = "dirs_sys";
         authors = [
           "Simon Ochsenreither <simon@ochsenreither.de>"
         ];
@@ -2894,6 +3086,7 @@ rec {
         version = "0.1.2";
         edition = "2018";
         sha256 = "0kavhavdxv4phzj4l0psvh55hszwnr0rcz8sxbvx20pyqi2a3gaf";
+        libName = "dirs_sys_next";
         authors = [
           "The @xdg-rs members"
         ];
@@ -2918,6 +3111,16 @@ rec {
         ];
 
       };
+      "dissimilar" = rec {
+        crateName = "dissimilar";
+        version = "1.0.9";
+        edition = "2018";
+        sha256 = "0bcn4s99ghigd3yadpd7i3gljv5z2hkr07ijvvxvsxmz3yfygy2r";
+        authors = [
+          "David Tolnay <dtolnay@gmail.com>"
+        ];
+
+      };
       "doc-comment" = rec {
         crateName = "doc-comment";
         version = "0.3.3";
@@ -2935,6 +3138,7 @@ rec {
         edition = "2018";
         sha256 = "15cvgxqngxslgllz15m8aban6wqfgsi6nlhr0g25yfsnd6nq4lpg";
         procMacro = true;
+        libName = "document_features";
         libPath = "lib.rs";
         authors = [
           "Slint Developers <info@slint-ui.com>"
@@ -2983,9 +3187,10 @@ rec {
       };
       "ed25519-dalek" = rec {
         crateName = "ed25519-dalek";
-        version = "2.1.0";
+        version = "2.1.1";
         edition = "2021";
-        sha256 = "1h13qm789m9gdjl6jazss80hqi8ll37m0afwcnw23zcbqjp8wqhz";
+        sha256 = "0w88cafwglg9hjizldbmlza0ns3hls81zk1bcih3m5m3h67algaa";
+        libName = "ed25519_dalek";
         authors = [
           "isis lovecruft <isis@patternsinthevoid.net>"
           "Tony Arcieri <bascule@gmail.com>"
@@ -3070,36 +3275,29 @@ rec {
           "default" = [ "use_std" ];
           "serde" = [ "dep:serde" ];
         };
-        resolvedDefaultFeatures = [ "default" "use_std" ];
+        resolvedDefaultFeatures = [ "use_std" ];
       };
-      "encoding_rs" = rec {
-        crateName = "encoding_rs";
-        version = "0.8.33";
-        edition = "2018";
-        sha256 = "1qa5k4a0ipdrxq4xg9amms9r9pnnfn7nfh2i9m3mw0ka563b6s3j";
+      "encode_unicode" = rec {
+        crateName = "encode_unicode";
+        version = "0.3.6";
+        edition = "2015";
+        sha256 = "07w3vzrhxh9lpjgsg2y5bwzfar2aq35mdznvcp3zjl0ssj7d4mx3";
         authors = [
-          "Henri Sivonen <hsivonen@hsivonen.fi>"
-        ];
-        dependencies = [
-          {
-            name = "cfg-if";
-            packageId = "cfg-if";
-          }
+          "Torbjรธrn Birch Moltu <t.b.moltu@lyse.net>"
         ];
         features = {
-          "default" = [ "alloc" ];
-          "fast-legacy-encode" = [ "fast-hangul-encode" "fast-hanja-encode" "fast-kanji-encode" "fast-gb-hanzi-encode" "fast-big5-hanzi-encode" ];
-          "packed_simd" = [ "dep:packed_simd" ];
-          "serde" = [ "dep:serde" ];
-          "simd-accel" = [ "packed_simd" "packed_simd/into_bits" ];
+          "ascii" = [ "dep:ascii" ];
+          "clippy" = [ "dep:clippy" ];
+          "default" = [ "std" ];
         };
-        resolvedDefaultFeatures = [ "alloc" "default" ];
+        resolvedDefaultFeatures = [ "default" "std" ];
       };
       "endian-type" = rec {
         crateName = "endian-type";
         version = "0.1.2";
         edition = "2015";
         sha256 = "0bbh88zaig1jfqrm7w3gx0pz81kw2jakk3055vbgapw3dmk08ky3";
+        libName = "endian_type";
         authors = [
           "Lolirofle <lolipopple@hotmail.com>"
         ];
@@ -3111,6 +3309,7 @@ rec {
         edition = "2018";
         sha256 = "0k6wcf58h5kh64yq5nfq71va53kaya0kzxwsjwbgwm2n2zd9axxs";
         procMacro = true;
+        libName = "enum_primitive_derive";
         authors = [
           "Doug Goldstein <cardoe@cardoe.com>"
         ];
@@ -3126,7 +3325,7 @@ rec {
           }
           {
             name = "syn";
-            packageId = "syn 2.0.48";
+            packageId = "syn 2.0.76";
           }
         ];
 
@@ -3140,9 +3339,10 @@ rec {
       };
       "erased-serde" = rec {
         crateName = "erased-serde";
-        version = "0.4.4";
+        version = "0.4.5";
         edition = "2021";
-        sha256 = "1lx0si6iljzmfpblhn4b0ip3kw2yv4vjyca0riqz3ix311q80wrb";
+        sha256 = "13dirfj9972nvk05b20w3xyn3xp1j6qyfp9avhksnkxbcnfkiqi4";
+        libName = "erased_serde";
         authors = [
           "David Tolnay <dtolnay@gmail.com>"
         ];
@@ -3152,13 +3352,17 @@ rec {
             packageId = "serde";
             usesDefaultFeatures = false;
           }
+          {
+            name = "typeid";
+            packageId = "typeid";
+          }
         ];
         features = {
           "alloc" = [ "serde/alloc" ];
           "default" = [ "std" ];
           "std" = [ "alloc" "serde/std" ];
         };
-        resolvedDefaultFeatures = [ "alloc" ];
+        resolvedDefaultFeatures = [ "alloc" "default" "std" ];
       };
       "errno" = rec {
         crateName = "errno";
@@ -3205,6 +3409,7 @@ rec {
         version = "2.3.1";
         edition = "2018";
         sha256 = "08baxlf8qz01lgjsdbfhs193r9y1nlc566s5xvzyf4dzwy8qkwb4";
+        libName = "error_code";
         authors = [
           "Douman <douman@gmx.se>"
         ];
@@ -3226,6 +3431,7 @@ rec {
         version = "2.5.3";
         edition = "2018";
         sha256 = "1q4w3pndc518crld6zsqvvpy9lkzwahp2zgza9kbzmmqh9gif1h2";
+        libName = "event_listener";
         authors = [
           "Stjepan Glavina <stjepang@gmail.com>"
         ];
@@ -3236,6 +3442,7 @@ rec {
         version = "4.0.3";
         edition = "2021";
         sha256 = "0vk4smw1vf871vi76af1zn7w69jg3zmpjddpby2qq91bkg21bck7";
+        libName = "event_listener";
         authors = [
           "Stjepan Glavina <stjepang@gmail.com>"
         ];
@@ -3271,6 +3478,7 @@ rec {
         version = "5.2.0";
         edition = "2021";
         sha256 = "14fcnjgpfl22645nhc3hzkdq3a1v0srqacc3kfassg7sjj8vhprb";
+        libName = "event_listener";
         authors = [
           "Stjepan Glavina <stjepang@gmail.com>"
         ];
@@ -3306,6 +3514,7 @@ rec {
         version = "0.4.0";
         edition = "2018";
         sha256 = "1lwprdjqp2ibbxhgm9khw7s7y7k4xiqj5i5yprqiks6mnrq4v3lm";
+        libName = "event_listener_strategy";
         authors = [
           "John Nunley <dev@notgull.net>"
         ];
@@ -3331,6 +3540,7 @@ rec {
         version = "0.5.0";
         edition = "2021";
         sha256 = "148jflvjrq0zrr3dx3srv88jksj1klm4amy3b9fifjdpm75azvgy";
+        libName = "event_listener_strategy";
         authors = [
           "John Nunley <dev@notgull.net>"
         ];
@@ -3351,6 +3561,27 @@ rec {
         };
         resolvedDefaultFeatures = [ "std" ];
       };
+      "expect-test" = rec {
+        crateName = "expect-test";
+        version = "1.5.0";
+        edition = "2018";
+        sha256 = "1q55nrkgzg345905aqbsdrwlq4sk0gjn4z5bdph1an1kc6jy02wy";
+        libName = "expect_test";
+        authors = [
+          "rust-analyzer developers"
+        ];
+        dependencies = [
+          {
+            name = "dissimilar";
+            packageId = "dissimilar";
+          }
+          {
+            name = "once_cell";
+            packageId = "once_cell";
+          }
+        ];
+
+      };
       "fastcdc" = rec {
         crateName = "fastcdc";
         version = "3.1.0";
@@ -3413,6 +3644,7 @@ rec {
         version = "3.0.13";
         edition = "2018";
         sha256 = "1df1jdncda67g65hrnmd2zsl7q5hdn8cm84chdalxndsx7akw0zg";
+        libName = "fd_lock";
         authors = [
           "Yoshua Wuyts <yoshuawuyts@gmail.com>"
         ];
@@ -3441,6 +3673,7 @@ rec {
         version = "0.2.5";
         edition = "2018";
         sha256 = "1dxn0g50pv0ppal779vi7k40fr55pbhkyv4in7i13pgl4sn3wmr7";
+        libName = "fiat_crypto";
         authors = [
           "Fiat Crypto library authors <jgross@mit.edu>"
         ];
@@ -3603,6 +3836,7 @@ rec {
         version = "0.11.0";
         edition = "2018";
         sha256 = "0jyldvp0kvjk21j5vqga42lkksaf7zg8jkj3l6h2dv20kyl66nif";
+        libName = "fuse_backend_rs";
         authors = [
           "Liu Bo <bo.liu@linux.alibaba.com>"
           "Liu Jiang <gerry@linux.alibaba.com>"
@@ -3643,7 +3877,7 @@ rec {
           }
           {
             name = "mio";
-            packageId = "mio";
+            packageId = "mio 0.8.11";
             features = [ "os-poll" "os-ext" ];
           }
           {
@@ -3771,6 +4005,7 @@ rec {
         version = "0.3.30";
         edition = "2018";
         sha256 = "0y6b7xxqdjm9hlcjpakcg41qfl7lihf6gavk8fyqijsxhvbzgj7a";
+        libName = "futures_channel";
         dependencies = [
           {
             name = "futures-core";
@@ -3798,6 +4033,7 @@ rec {
         version = "0.3.30";
         edition = "2018";
         sha256 = "07aslayrn3lbggj54kci0ishmd1pr367fp7iks7adia1p05miinz";
+        libName = "futures_core";
         features = {
           "default" = [ "std" ];
           "portable-atomic" = [ "dep:portable-atomic" ];
@@ -3810,6 +4046,7 @@ rec {
         version = "0.3.30";
         edition = "2018";
         sha256 = "07dh08gs9vfll2h36kq32q9xd86xm6lyl9xikmmwlkqnmrrgqxm5";
+        libName = "futures_executor";
         dependencies = [
           {
             name = "futures-core";
@@ -3840,6 +4077,7 @@ rec {
         version = "0.3.30";
         edition = "2018";
         sha256 = "1hgh25isvsr4ybibywhr4dpys8mjnscw4wfxxwca70cn1gi26im4";
+        libName = "futures_io";
         features = {
           "default" = [ "std" ];
         };
@@ -3850,6 +4088,7 @@ rec {
         version = "2.3.0";
         edition = "2021";
         sha256 = "19gk4my8zhfym6gwnpdjiyv2hw8cc098skkbkhryjdaf0yspwljj";
+        libName = "futures_lite";
         authors = [
           "Stjepan Glavina <stjepang@gmail.com>"
           "Contributors to futures-rs"
@@ -3898,6 +4137,7 @@ rec {
         edition = "2018";
         sha256 = "1b49qh9d402y8nka4q6wvvj0c88qq91wbr192mdn5h54nzs0qxc7";
         procMacro = true;
+        libName = "futures_macro";
         dependencies = [
           {
             name = "proc-macro2";
@@ -3909,7 +4149,7 @@ rec {
           }
           {
             name = "syn";
-            packageId = "syn 2.0.48";
+            packageId = "syn 2.0.76";
             features = [ "full" ];
           }
         ];
@@ -3920,6 +4160,7 @@ rec {
         version = "0.3.30";
         edition = "2018";
         sha256 = "1dag8xyyaya8n8mh8smx7x6w2dpmafg2din145v973a3hw7f1f4z";
+        libName = "futures_sink";
         features = {
           "default" = [ "std" ];
           "std" = [ "alloc" ];
@@ -3931,6 +4172,7 @@ rec {
         version = "0.3.30";
         edition = "2018";
         sha256 = "013h1724454hj8qczp8vvs10qfiqrxr937qsrv6rhii68ahlzn1q";
+        libName = "futures_task";
         features = {
           "default" = [ "std" ];
           "std" = [ "alloc" ];
@@ -3942,6 +4184,7 @@ rec {
         version = "3.0.2";
         edition = "2018";
         sha256 = "0b5v7lk9838ix6jdcrainsyrh7xrf24pwm61dp13907qkn806jz6";
+        libName = "futures_timer";
         authors = [
           "Alex Crichton <alex@alexcrichton.com>"
         ];
@@ -3956,6 +4199,7 @@ rec {
         version = "0.3.30";
         edition = "2018";
         sha256 = "0j0xqhcir1zf2dcbpd421kgw6wvsk0rpxflylcysn1rlp3g02r1x";
+        libName = "futures_util";
         dependencies = [
           {
             name = "futures-channel";
@@ -4055,9 +4299,9 @@ rec {
       };
       "gcp_auth" = rec {
         crateName = "gcp_auth";
-        version = "0.10.0";
+        version = "0.12.2";
         edition = "2021";
-        sha256 = "1m7lsh2gc7n9p0gs9k2qbxsrvchw1vz6dyz9a2ma322vd3m72b6y";
+        sha256 = "0gb9bp9nkc810kfycm4vxndpccbhx68lcirq0y06lafykpkpjv2k";
         dependencies = [
           {
             name = "async-trait";
@@ -4065,7 +4309,11 @@ rec {
           }
           {
             name = "base64";
-            packageId = "base64";
+            packageId = "base64 0.22.1";
+          }
+          {
+            name = "bytes";
+            packageId = "bytes";
           }
           {
             name = "chrono";
@@ -4077,27 +4325,37 @@ rec {
             packageId = "home";
           }
           {
+            name = "http";
+            packageId = "http";
+          }
+          {
+            name = "http-body-util";
+            packageId = "http-body-util";
+          }
+          {
             name = "hyper";
             packageId = "hyper";
-            features = [ "client" "runtime" "http2" ];
+            usesDefaultFeatures = false;
+            features = [ "client" "http1" "http2" ];
           }
           {
             name = "hyper-rustls";
             packageId = "hyper-rustls";
             usesDefaultFeatures = false;
-            features = [ "tokio-runtime" "http1" "http2" ];
+            features = [ "http1" "http2" ];
           }
           {
-            name = "ring";
-            packageId = "ring";
+            name = "hyper-util";
+            packageId = "hyper-util";
+            features = [ "client-legacy" ];
           }
           {
-            name = "rustls";
-            packageId = "rustls 0.21.12";
+            name = "ring";
+            packageId = "ring";
           }
           {
             name = "rustls-pemfile";
-            packageId = "rustls-pemfile 1.0.4";
+            packageId = "rustls-pemfile";
           }
           {
             name = "serde";
@@ -4129,10 +4387,6 @@ rec {
             name = "url";
             packageId = "url";
           }
-          {
-            name = "which";
-            packageId = "which 5.0.0";
-          }
         ];
         devDependencies = [
           {
@@ -4142,7 +4396,7 @@ rec {
           }
         ];
         features = {
-          "default" = [ "hyper-rustls/rustls-native-certs" ];
+          "default" = [ "hyper-rustls/rustls-native-certs" "hyper-rustls/ring" ];
           "webpki-roots" = [ "hyper-rustls/webpki-roots" ];
         };
         resolvedDefaultFeatures = [ "default" ];
@@ -4175,11 +4429,53 @@ rec {
         version = "0.99.1";
         edition = "2018";
         sha256 = "1g6zmr88fk48f1ksz9ik1i2mwjsiam9s4p9aybhvs2zwzphxychb";
+        libName = "genawaiter_macro";
         authors = [
           "Devin R <devin.ragotzy@gmail.com>"
         ];
         features = { };
       };
+      "generator" = rec {
+        crateName = "generator";
+        version = "0.8.1";
+        edition = "2021";
+        sha256 = "1yw3rxbfq5a3yzrg88pdln2lvi9014zg1mpq1q4x0cf27gai8q0q";
+        authors = [
+          "Xudong Huang <huangxu008@hotmail.com>"
+        ];
+        dependencies = [
+          {
+            name = "cfg-if";
+            packageId = "cfg-if";
+          }
+          {
+            name = "libc";
+            packageId = "libc";
+            target = { target, features }: (target."unix" or false);
+          }
+          {
+            name = "log";
+            packageId = "log";
+          }
+          {
+            name = "windows";
+            packageId = "windows";
+            target = { target, features }: (target."windows" or false);
+            features = [ "Win32_System_Memory" "Win32_System_Kernel" "Win32_Foundation" "Win32_System_SystemInformation" "Win32_System_Diagnostics_Debug" ];
+          }
+        ];
+        buildDependencies = [
+          {
+            name = "cc";
+            packageId = "cc";
+          }
+          {
+            name = "rustversion";
+            packageId = "rustversion";
+          }
+        ];
+
+      };
       "generic-array" = rec {
         crateName = "generic-array";
         version = "0.14.7";
@@ -4222,6 +4518,12 @@ rec {
             packageId = "cfg-if";
           }
           {
+            name = "js-sys";
+            packageId = "js-sys";
+            optional = true;
+            target = { target, features }: ((("wasm32" == target."arch" or null) || ("wasm64" == target."arch" or null)) && ("unknown" == target."os" or null));
+          }
+          {
             name = "libc";
             packageId = "libc";
             usesDefaultFeatures = false;
@@ -4233,6 +4535,13 @@ rec {
             usesDefaultFeatures = false;
             target = { target, features }: ("wasi" == target."os" or null);
           }
+          {
+            name = "wasm-bindgen";
+            packageId = "wasm-bindgen";
+            optional = true;
+            usesDefaultFeatures = false;
+            target = { target, features }: ((("wasm32" == target."arch" or null) || ("wasm64" == target."arch" or null)) && ("unknown" == target."os" or null));
+          }
         ];
         features = {
           "compiler_builtins" = [ "dep:compiler_builtins" ];
@@ -4242,7 +4551,7 @@ rec {
           "rustc-dep-of-std" = [ "compiler_builtins" "core" "libc/rustc-dep-of-std" "wasi/rustc-dep-of-std" ];
           "wasm-bindgen" = [ "dep:wasm-bindgen" ];
         };
-        resolvedDefaultFeatures = [ "std" ];
+        resolvedDefaultFeatures = [ "js" "js-sys" "std" "wasm-bindgen" ];
       };
       "gimli" = rec {
         crateName = "gimli";
@@ -4273,9 +4582,9 @@ rec {
       };
       "h2" = rec {
         crateName = "h2";
-        version = "0.3.26";
-        edition = "2018";
-        sha256 = "1s7msnfv7xprzs6xzfj5sg6p8bjcdpcqcmjjbkd345cyi1x55zl1";
+        version = "0.4.4";
+        edition = "2021";
+        sha256 = "0sc0ymhiqp4hbz39d405cjbga77wnz2pprbgyc498xs58hlwfvl1";
         authors = [
           "Carl Lerche <me@carllerche.com>"
           "Sean McArthur <sean@seanmonstar.com>"
@@ -4421,7 +4730,7 @@ rec {
         };
         resolvedDefaultFeatures = [ "ahash" "allocator-api2" "default" "inline-more" "raw" ];
       };
-      "heck" = rec {
+      "heck 0.4.1" = rec {
         crateName = "heck";
         version = "0.4.1";
         edition = "2018";
@@ -4435,11 +4744,19 @@ rec {
         };
         resolvedDefaultFeatures = [ "default" ];
       };
+      "heck 0.5.0" = rec {
+        crateName = "heck";
+        version = "0.5.0";
+        edition = "2021";
+        sha256 = "1sjmpsdl8czyh9ywl3qcsfsq9a307dg4ni2vnlwgnzzqhc4y0113";
+
+      };
       "hermit-abi" = rec {
         crateName = "hermit-abi";
-        version = "0.3.4";
+        version = "0.3.9";
         edition = "2021";
-        sha256 = "07v5vbwb9kx0yxgdpx15h38ynpzhaqx5ncriryipypi5707hwgax";
+        sha256 = "092hxjbjnq5fmz66grd9plxd0sh6ssg5fhgwwwqbrzgzkjwdycfj";
+        libName = "hermit_abi";
         authors = [
           "Stefan Lankes"
         ];
@@ -4471,6 +4788,7 @@ rec {
         version = "0.4.1";
         edition = "2021";
         sha256 = "0iny5inkixsdr41pm2vkqh3fl66752z5j5c0cdxw16yl9ryjdqkg";
+        libName = "hex_literal";
         authors = [
           "RustCrypto Developers"
         ];
@@ -4496,9 +4814,9 @@ rec {
       };
       "http" = rec {
         crateName = "http";
-        version = "0.2.11";
+        version = "1.1.0";
         edition = "2018";
-        sha256 = "1fwz3mhh86h5kfnr5767jlx9agpdggclq7xsqx930fflzakb2iw9";
+        sha256 = "0n426lmcxas6h75c2cp25m933pswlrfjz10v91vc62vib2sdvf91";
         authors = [
           "Alex Crichton <alex@alexcrichton.com>"
           "Carl Lerche <me@carllerche.com>"
@@ -4518,13 +4836,17 @@ rec {
             packageId = "itoa";
           }
         ];
-
+        features = {
+          "default" = [ "std" ];
+        };
+        resolvedDefaultFeatures = [ "default" "std" ];
       };
       "http-body" = rec {
         crateName = "http-body";
-        version = "0.4.6";
+        version = "1.0.0";
         edition = "2018";
-        sha256 = "1lmyjfk6bqk6k9gkn1dxq770sb78pqbqshga241hr5p995bb5skw";
+        sha256 = "0hyn8n3iadrbwq8y0p1rl1275s4nm49bllw5wji29g4aa3dqbb0w";
+        libName = "http_body";
         authors = [
           "Carl Lerche <me@carllerche.com>"
           "Lucio Franco <luciofranco14@gmail.com>"
@@ -4539,6 +4861,37 @@ rec {
             name = "http";
             packageId = "http";
           }
+        ];
+
+      };
+      "http-body-util" = rec {
+        crateName = "http-body-util";
+        version = "0.1.1";
+        edition = "2018";
+        sha256 = "07agldas2qgcfc05ckiarlmf9vzragbda823nqhrqrc6mjrghx84";
+        libName = "http_body_util";
+        authors = [
+          "Carl Lerche <me@carllerche.com>"
+          "Lucio Franco <luciofranco14@gmail.com>"
+          "Sean McArthur <sean@seanmonstar.com>"
+        ];
+        dependencies = [
+          {
+            name = "bytes";
+            packageId = "bytes";
+          }
+          {
+            name = "futures-core";
+            packageId = "futures-core";
+          }
+          {
+            name = "http";
+            packageId = "http";
+          }
+          {
+            name = "http-body";
+            packageId = "http-body";
+          }
           {
             name = "pin-project-lite";
             packageId = "pin-project-lite";
@@ -4581,9 +4934,9 @@ rec {
       };
       "hyper" = rec {
         crateName = "hyper";
-        version = "0.14.28";
-        edition = "2018";
-        sha256 = "107gkvqx4h9bl17d602zkm2dgpfq86l2dr36yzfsi8l3xcsy35mz";
+        version = "1.4.1";
+        edition = "2021";
+        sha256 = "01ds8i3q6hw5kw56mavy544m11gkr87zi999siigdl3n1qpd5psh";
         authors = [
           "Sean McArthur <sean@seanmonstar.com>"
         ];
@@ -4595,15 +4948,12 @@ rec {
           {
             name = "futures-channel";
             packageId = "futures-channel";
-          }
-          {
-            name = "futures-core";
-            packageId = "futures-core";
-            usesDefaultFeatures = false;
+            optional = true;
           }
           {
             name = "futures-util";
             packageId = "futures-util";
+            optional = true;
             usesDefaultFeatures = false;
           }
           {
@@ -4622,24 +4972,28 @@ rec {
           {
             name = "httparse";
             packageId = "httparse";
+            optional = true;
           }
           {
             name = "httpdate";
             packageId = "httpdate";
+            optional = true;
           }
           {
             name = "itoa";
             packageId = "itoa";
+            optional = true;
           }
           {
             name = "pin-project-lite";
             packageId = "pin-project-lite";
+            optional = true;
           }
           {
-            name = "socket2";
-            packageId = "socket2";
+            name = "smallvec";
+            packageId = "smallvec";
             optional = true;
-            features = [ "all" ];
+            features = [ "const_generics" "const_new" ];
           }
           {
             name = "tokio";
@@ -4647,50 +5001,46 @@ rec {
             features = [ "sync" ];
           }
           {
-            name = "tower-service";
-            packageId = "tower-service";
-          }
-          {
-            name = "tracing";
-            packageId = "tracing";
-            usesDefaultFeatures = false;
-            features = [ "std" ];
-          }
-          {
             name = "want";
             packageId = "want";
+            optional = true;
           }
         ];
         devDependencies = [
           {
+            name = "futures-channel";
+            packageId = "futures-channel";
+            features = [ "sink" ];
+          }
+          {
             name = "futures-util";
             packageId = "futures-util";
             usesDefaultFeatures = false;
-            features = [ "alloc" ];
+            features = [ "alloc" "sink" ];
           }
           {
             name = "tokio";
             packageId = "tokio";
-            features = [ "fs" "macros" "io-std" "io-util" "rt" "rt-multi-thread" "sync" "time" "test-util" ];
+            features = [ "fs" "macros" "net" "io-std" "io-util" "rt" "rt-multi-thread" "sync" "time" "test-util" ];
           }
         ];
         features = {
-          "ffi" = [ "libc" ];
-          "full" = [ "client" "http1" "http2" "server" "stream" "runtime" ];
-          "h2" = [ "dep:h2" ];
-          "http2" = [ "h2" ];
-          "libc" = [ "dep:libc" ];
-          "runtime" = [ "tcp" "tokio/rt" "tokio/time" ];
-          "socket2" = [ "dep:socket2" ];
-          "tcp" = [ "socket2" "tokio/net" "tokio/rt" "tokio/time" ];
+          "client" = [ "dep:want" "dep:pin-project-lite" "dep:smallvec" ];
+          "ffi" = [ "dep:libc" "dep:http-body-util" "futures-util?/alloc" ];
+          "full" = [ "client" "http1" "http2" "server" ];
+          "http1" = [ "dep:futures-channel" "dep:futures-util" "dep:httparse" "dep:itoa" ];
+          "http2" = [ "dep:futures-channel" "dep:futures-util" "dep:h2" ];
+          "server" = [ "dep:httpdate" "dep:pin-project-lite" "dep:smallvec" ];
+          "tracing" = [ "dep:tracing" ];
         };
-        resolvedDefaultFeatures = [ "client" "default" "full" "h2" "http1" "http2" "runtime" "server" "socket2" "stream" "tcp" ];
+        resolvedDefaultFeatures = [ "client" "default" "http1" "http2" "server" ];
       };
       "hyper-rustls" = rec {
         crateName = "hyper-rustls";
-        version = "0.24.2";
+        version = "0.27.2";
         edition = "2021";
-        sha256 = "1475j4a2nczz4aajzzsq3hpwg1zacmzbqg393a14j80ff8izsgpc";
+        sha256 = "0ma1wyfnqnkz7zyr7wpply3xfvlijd0rqqhb6ajs28c9jhnbxr2y";
+        libName = "hyper_rustls";
         dependencies = [
           {
             name = "futures-util";
@@ -4705,37 +5055,52 @@ rec {
             name = "hyper";
             packageId = "hyper";
             usesDefaultFeatures = false;
-            features = [ "client" ];
+          }
+          {
+            name = "hyper-util";
+            packageId = "hyper-util";
+            usesDefaultFeatures = false;
+            features = [ "client-legacy" "tokio" ];
           }
           {
             name = "rustls";
-            packageId = "rustls 0.21.12";
+            packageId = "rustls";
             usesDefaultFeatures = false;
           }
           {
             name = "rustls-native-certs";
-            packageId = "rustls-native-certs 0.6.3";
+            packageId = "rustls-native-certs";
             optional = true;
           }
           {
+            name = "rustls-pki-types";
+            packageId = "rustls-pki-types";
+            rename = "pki-types";
+          }
+          {
             name = "tokio";
             packageId = "tokio";
           }
           {
             name = "tokio-rustls";
-            packageId = "tokio-rustls 0.24.1";
+            packageId = "tokio-rustls";
             usesDefaultFeatures = false;
           }
+          {
+            name = "tower-service";
+            packageId = "tower-service";
+          }
         ];
         devDependencies = [
           {
-            name = "hyper";
-            packageId = "hyper";
-            features = [ "full" ];
+            name = "hyper-util";
+            packageId = "hyper-util";
+            usesDefaultFeatures = false;
+            features = [ "server-auto" ];
           }
           {
             name = "rustls";
-            packageId = "rustls 0.21.12";
+            packageId = "rustls";
             usesDefaultFeatures = false;
             features = [ "tls12" ];
           }
@@ -4746,26 +5111,29 @@ rec {
           }
         ];
         features = {
-          "acceptor" = [ "hyper/server" "tokio-runtime" ];
-          "default" = [ "native-tokio" "http1" "tls12" "logging" "acceptor" ];
-          "http1" = [ "hyper/http1" ];
-          "http2" = [ "hyper/http2" ];
+          "aws-lc-rs" = [ "rustls/aws_lc_rs" ];
+          "default" = [ "native-tokio" "http1" "tls12" "logging" "aws-lc-rs" ];
+          "fips" = [ "aws-lc-rs" "rustls/fips" ];
+          "http1" = [ "hyper-util/http1" ];
+          "http2" = [ "hyper-util/http2" ];
           "log" = [ "dep:log" ];
           "logging" = [ "log" "tokio-rustls/logging" "rustls/logging" ];
-          "native-tokio" = [ "tokio-runtime" "rustls-native-certs" ];
+          "native-tokio" = [ "rustls-native-certs" ];
+          "ring" = [ "rustls/ring" ];
           "rustls-native-certs" = [ "dep:rustls-native-certs" ];
+          "rustls-platform-verifier" = [ "dep:rustls-platform-verifier" ];
           "tls12" = [ "tokio-rustls/tls12" "rustls/tls12" ];
-          "tokio-runtime" = [ "hyper/runtime" ];
           "webpki-roots" = [ "dep:webpki-roots" ];
-          "webpki-tokio" = [ "tokio-runtime" "webpki-roots" ];
+          "webpki-tokio" = [ "webpki-roots" ];
         };
-        resolvedDefaultFeatures = [ "http1" "http2" "rustls-native-certs" "tokio-runtime" ];
+        resolvedDefaultFeatures = [ "http1" "http2" "native-tokio" "ring" "rustls-native-certs" "tls12" ];
       };
       "hyper-timeout" = rec {
         crateName = "hyper-timeout";
-        version = "0.4.1";
+        version = "0.5.1";
         edition = "2018";
-        sha256 = "1c8k3g8k2yh1gxvsx9p7amkimgxhl9kafwpj7jyf8ywc5r45ifdv";
+        sha256 = "14rpyv9zz0ncadn9qgmnjz0hiqk3nav7hglkk1a6yfy8wmhsj0rj";
+        libName = "hyper_timeout";
         authors = [
           "Herman J. Radtke III <herman@hermanradtke.com>"
         ];
@@ -4773,7 +5141,11 @@ rec {
           {
             name = "hyper";
             packageId = "hyper";
-            features = [ "client" ];
+          }
+          {
+            name = "hyper-util";
+            packageId = "hyper-util";
+            features = [ "client-legacy" "http1" ];
           }
           {
             name = "pin-project-lite";
@@ -4784,15 +5156,15 @@ rec {
             packageId = "tokio";
           }
           {
-            name = "tokio-io-timeout";
-            packageId = "tokio-io-timeout";
+            name = "tower-service";
+            packageId = "tower-service";
           }
         ];
         devDependencies = [
           {
             name = "hyper";
             packageId = "hyper";
-            features = [ "client" "http1" "tcp" ];
+            features = [ "http1" ];
           }
           {
             name = "tokio";
@@ -4802,11 +5174,114 @@ rec {
         ];
 
       };
+      "hyper-util" = rec {
+        crateName = "hyper-util";
+        version = "0.1.7";
+        edition = "2021";
+        sha256 = "1fg9h591skksq5zxnffyisj7487jhdcgj6c7bvlkckn535bhbryd";
+        libName = "hyper_util";
+        authors = [
+          "Sean McArthur <sean@seanmonstar.com>"
+        ];
+        dependencies = [
+          {
+            name = "bytes";
+            packageId = "bytes";
+          }
+          {
+            name = "futures-channel";
+            packageId = "futures-channel";
+            optional = true;
+          }
+          {
+            name = "futures-util";
+            packageId = "futures-util";
+            usesDefaultFeatures = false;
+          }
+          {
+            name = "http";
+            packageId = "http";
+          }
+          {
+            name = "http-body";
+            packageId = "http-body";
+          }
+          {
+            name = "hyper";
+            packageId = "hyper";
+          }
+          {
+            name = "pin-project-lite";
+            packageId = "pin-project-lite";
+          }
+          {
+            name = "socket2";
+            packageId = "socket2";
+            optional = true;
+            features = [ "all" ];
+          }
+          {
+            name = "tokio";
+            packageId = "tokio";
+            optional = true;
+            usesDefaultFeatures = false;
+          }
+          {
+            name = "tower";
+            packageId = "tower";
+            optional = true;
+            usesDefaultFeatures = false;
+            features = [ "make" "util" ];
+          }
+          {
+            name = "tower-service";
+            packageId = "tower-service";
+            optional = true;
+          }
+          {
+            name = "tracing";
+            packageId = "tracing";
+            optional = true;
+            usesDefaultFeatures = false;
+            features = [ "std" ];
+          }
+        ];
+        devDependencies = [
+          {
+            name = "bytes";
+            packageId = "bytes";
+          }
+          {
+            name = "hyper";
+            packageId = "hyper";
+            features = [ "full" ];
+          }
+          {
+            name = "tokio";
+            packageId = "tokio";
+            features = [ "macros" "test-util" "signal" ];
+          }
+        ];
+        features = {
+          "client" = [ "hyper/client" "dep:tracing" "dep:futures-channel" "dep:tower" "dep:tower-service" ];
+          "client-legacy" = [ "client" "dep:socket2" "tokio/sync" ];
+          "full" = [ "client" "client-legacy" "server" "server-auto" "server-graceful" "service" "http1" "http2" "tokio" ];
+          "http1" = [ "hyper/http1" ];
+          "http2" = [ "hyper/http2" ];
+          "server" = [ "hyper/server" ];
+          "server-auto" = [ "server" "http1" "http2" ];
+          "server-graceful" = [ "server" "tokio/sync" ];
+          "service" = [ "dep:tower" "dep:tower-service" ];
+          "tokio" = [ "dep:tokio" "tokio/net" "tokio/rt" "tokio/time" ];
+        };
+        resolvedDefaultFeatures = [ "client" "client-legacy" "default" "http1" "http2" "server" "server-auto" "service" "tokio" ];
+      };
       "iana-time-zone" = rec {
         crateName = "iana-time-zone";
         version = "0.1.60";
         edition = "2018";
         sha256 = "0hdid5xz3jznm04lysjm3vi93h3c523w0hcc3xba47jl3ddbpzz7";
+        libName = "iana_time_zone";
         authors = [
           "Andrew Straw <strawman@astraw.com>"
           "Renรฉ Kijewski <rene.kijewski@fu-berlin.de>"
@@ -4840,7 +5315,7 @@ rec {
           }
           {
             name = "windows-core";
-            packageId = "windows-core";
+            packageId = "windows-core 0.52.0";
             target = { target, features }: ("windows" == target."os" or null);
           }
         ];
@@ -4852,6 +5327,7 @@ rec {
         version = "0.1.2";
         edition = "2018";
         sha256 = "17r6jmj31chn7xs9698r122mapq85mfnv98bb4pg6spm0si2f67k";
+        libName = "iana_time_zone_haiku";
         authors = [
           "Renรฉ Kijewski <crates.io@k6i.de>"
         ];
@@ -4900,94 +5376,6 @@ rec {
         };
         resolvedDefaultFeatures = [ "alloc" "default" "std" ];
       };
-      "imbl" = rec {
-        crateName = "imbl";
-        version = "2.0.3";
-        edition = "2018";
-        sha256 = "11bhchs0d1bbbmr8ari4y4d62vqxs7xg4fkhjlhgbv98h0n193cp";
-        authors = [
-          "Bodil Stokke <bodil@bodil.org>"
-          "Joe Neeman <joeneeman@gmail.com>"
-        ];
-        dependencies = [
-          {
-            name = "bitmaps";
-            packageId = "bitmaps";
-          }
-          {
-            name = "imbl-sized-chunks";
-            packageId = "imbl-sized-chunks";
-          }
-          {
-            name = "proptest";
-            packageId = "proptest";
-            optional = true;
-          }
-          {
-            name = "rand_core";
-            packageId = "rand_core";
-          }
-          {
-            name = "rand_xoshiro";
-            packageId = "rand_xoshiro";
-          }
-          {
-            name = "serde";
-            packageId = "serde";
-            optional = true;
-          }
-        ];
-        buildDependencies = [
-          {
-            name = "version_check";
-            packageId = "version_check";
-          }
-        ];
-        devDependencies = [
-          {
-            name = "proptest";
-            packageId = "proptest";
-          }
-          {
-            name = "serde";
-            packageId = "serde";
-          }
-        ];
-        features = {
-          "arbitrary" = [ "dep:arbitrary" ];
-          "proptest" = [ "dep:proptest" ];
-          "quickcheck" = [ "dep:quickcheck" ];
-          "rayon" = [ "dep:rayon" ];
-          "refpool" = [ "dep:refpool" ];
-          "serde" = [ "dep:serde" ];
-        };
-        resolvedDefaultFeatures = [ "proptest" "serde" ];
-      };
-      "imbl-sized-chunks" = rec {
-        crateName = "imbl-sized-chunks";
-        version = "0.1.2";
-        edition = "2021";
-        sha256 = "0qzdw55na2w6fd44p7y9rh05nxa98gzpaigmwg57sy7db3xhch0l";
-        authors = [
-          "Bodil Stokke <bodil@bodil.org>"
-          "Joe Neeman <joeneeman@gmail.com>"
-        ];
-        dependencies = [
-          {
-            name = "bitmaps";
-            packageId = "bitmaps";
-            usesDefaultFeatures = false;
-          }
-        ];
-        features = {
-          "arbitrary" = [ "dep:arbitrary" ];
-          "array-ops" = [ "dep:array-ops" ];
-          "default" = [ "std" ];
-          "refpool" = [ "dep:refpool" ];
-          "ringbuffer" = [ "array-ops" ];
-        };
-        resolvedDefaultFeatures = [ "default" "std" ];
-      };
       "indexmap 1.9.3" = rec {
         crateName = "indexmap";
         version = "1.9.3";
@@ -5057,6 +5445,55 @@ rec {
         };
         resolvedDefaultFeatures = [ "default" "serde" "std" ];
       };
+      "indicatif" = rec {
+        crateName = "indicatif";
+        version = "0.17.8";
+        edition = "2021";
+        sha256 = "18xyqxw9i5x4sbpzckhfz3nm984iq9r7nbi2lk76nz888n7mlfkn";
+        dependencies = [
+          {
+            name = "console";
+            packageId = "console";
+            usesDefaultFeatures = false;
+            features = [ "ansi-parsing" ];
+          }
+          {
+            name = "instant";
+            packageId = "instant";
+            target = { target, features }: ("wasm32" == target."arch" or null);
+          }
+          {
+            name = "number_prefix";
+            packageId = "number_prefix";
+          }
+          {
+            name = "portable-atomic";
+            packageId = "portable-atomic";
+          }
+          {
+            name = "unicode-width";
+            packageId = "unicode-width";
+            optional = true;
+          }
+          {
+            name = "vt100";
+            packageId = "vt100";
+            optional = true;
+          }
+        ];
+        features = {
+          "default" = [ "unicode-width" "console/unicode-width" ];
+          "futures" = [ "dep:futures-core" ];
+          "improved_unicode" = [ "unicode-segmentation" "unicode-width" "console/unicode-width" ];
+          "in_memory" = [ "vt100" ];
+          "rayon" = [ "dep:rayon" ];
+          "tokio" = [ "dep:tokio" ];
+          "unicode-segmentation" = [ "dep:unicode-segmentation" ];
+          "unicode-width" = [ "dep:unicode-width" ];
+          "vt100" = [ "dep:vt100" ];
+        };
+        resolvedDefaultFeatures = [ "default" "in_memory" "unicode-width" "vt100" ];
+      };
       "instant" = rec {
         crateName = "instant";
         version = "0.1.12";
@@ -5112,6 +5549,7 @@ rec {
         version = "0.4.10";
         edition = "2018";
         sha256 = "0m9la3f7cs77y85nkbcjsxkb7k861fc6bdhahyfidgh7gljh1b8b";
+        libName = "is_terminal";
         authors = [
           "softprops <d.tangren@gmail.com>"
           "Dan Gohman <dev@sunfishcode.online>"
@@ -5145,6 +5583,14 @@ rec {
         ];
 
       };
+      "is_terminal_polyfill" = rec {
+        crateName = "is_terminal_polyfill";
+        version = "1.70.1";
+        edition = "2021";
+        sha256 = "1kwfgglh91z33kl0w5i338mfpa3zs0hidq5j4ny4rmjwrikchhvr";
+        features = { };
+        resolvedDefaultFeatures = [ "default" ];
+      };
       "itertools 0.10.5" = rec {
         crateName = "itertools";
         version = "0.10.5";
@@ -5166,11 +5612,11 @@ rec {
         };
         resolvedDefaultFeatures = [ "default" "use_alloc" "use_std" ];
       };
-      "itertools 0.11.0" = rec {
+      "itertools 0.12.1" = rec {
         crateName = "itertools";
-        version = "0.11.0";
+        version = "0.12.1";
         edition = "2018";
-        sha256 = "0mzyqcc59azx9g5cg6fs8k529gvh4463smmka6jvzs3cd2jp7hdi";
+        sha256 = "0s95jbb3ndj1lvfxyq5wanc0fm0r6hg6q4ngb92qlfdxvci10ads";
         authors = [
           "bluss"
         ];
@@ -5185,13 +5631,13 @@ rec {
           "default" = [ "use_std" ];
           "use_std" = [ "use_alloc" "either/use_std" ];
         };
-        resolvedDefaultFeatures = [ "use_alloc" ];
+        resolvedDefaultFeatures = [ "default" "use_alloc" "use_std" ];
       };
-      "itertools 0.12.0" = rec {
+      "itertools 0.13.0" = rec {
         crateName = "itertools";
-        version = "0.12.0";
+        version = "0.13.0";
         edition = "2018";
-        sha256 = "1c07gzdlc6a1c8p8jrvvw3gs52bss3y58cs2s21d9i978l36pnr5";
+        sha256 = "11hiy3qzl643zcigknclh446qb9zlg4dpdzfkjaa9q9fqpgyfgj1";
         authors = [
           "bluss"
         ];
@@ -5222,9 +5668,9 @@ rec {
       };
       "jobserver" = rec {
         crateName = "jobserver";
-        version = "0.1.27";
-        edition = "2018";
-        sha256 = "0z9w6vfqwbr6hfk9yaw7kydlh6f7k39xdlszxlh39in4acwzcdwc";
+        version = "0.1.32";
+        edition = "2021";
+        sha256 = "1l2k50qmj84x9mn39ivjz76alqmx72jhm12rw33zx9xnpv5xpla8";
         authors = [
           "Alex Crichton <alex@alexcrichton.com>"
         ];
@@ -5242,6 +5688,7 @@ rec {
         version = "0.3.67";
         edition = "2018";
         sha256 = "1lar78p13w781b4zf44a0sk26i461fczbdrhpan6kjav4gqkc7cs";
+        libName = "js_sys";
         authors = [
           "The wasm-bindgen Developers"
         ];
@@ -5255,9 +5702,9 @@ rec {
       };
       "lazy_static" = rec {
         crateName = "lazy_static";
-        version = "1.4.0";
+        version = "1.5.0";
         edition = "2015";
-        sha256 = "0in6ikhw8mgl33wjv6q6xfrb5b9jr16q8ygjy803fay4zcisvaz2";
+        sha256 = "1zk6dqqni0193xg6iijh7i3i44sryglwgvx20spdvwk3r6sbrlmv";
         authors = [
           "Marvin Lรถbel <loebel.marvin@gmail.com>"
         ];
@@ -5271,6 +5718,7 @@ rec {
         version = "0.8.5";
         edition = "2018";
         sha256 = "0ihf0x3vrk25fq3bv9q35m0xax0wmvwkh0j0pjm2yk4ddvh5vpic";
+        libName = "lexical_core";
         authors = [
           "Alex Huszagh <ahuszagh@gmail.com>"
         ];
@@ -5333,6 +5781,7 @@ rec {
         version = "0.8.5";
         edition = "2018";
         sha256 = "0py0gp8hlzcrlvjqmqlpl2v1as65iiqxq2xsabxvhc01pmg3lfv8";
+        libName = "lexical_parse_float";
         authors = [
           "Alex Huszagh <ahuszagh@gmail.com>"
         ];
@@ -5373,6 +5822,7 @@ rec {
         version = "0.8.6";
         edition = "2018";
         sha256 = "1sayji3mpvb2xsjq56qcq3whfz8px9a6fxk5v7v15hyhbr4982bd";
+        libName = "lexical_parse_integer";
         authors = [
           "Alex Huszagh <ahuszagh@gmail.com>"
         ];
@@ -5404,6 +5854,7 @@ rec {
         version = "0.8.5";
         edition = "2018";
         sha256 = "1z73qkv7yxhsbc4aiginn1dqmsj8jarkrdlyxc88g2gz2vzvjmaj";
+        libName = "lexical_util";
         authors = [
           "Alex Huszagh <ahuszagh@gmail.com>"
         ];
@@ -5430,6 +5881,7 @@ rec {
         version = "0.8.5";
         edition = "2018";
         sha256 = "0qk825l0csvnksh9sywb51996cjc2bylq6rxjaiha7sqqjhvmjmc";
+        libName = "lexical_write_float";
         authors = [
           "Alex Huszagh <ahuszagh@gmail.com>"
         ];
@@ -5470,6 +5922,7 @@ rec {
         version = "0.8.5";
         edition = "2018";
         sha256 = "0ii4hmvqrg6pd4j9y1pkhkp0nw2wpivjzmljh6v6ca22yk8z7dp1";
+        libName = "lexical_write_integer";
         authors = [
           "Alex Huszagh <ahuszagh@gmail.com>"
         ];
@@ -5498,9 +5951,9 @@ rec {
       };
       "libc" = rec {
         crateName = "libc";
-        version = "0.2.152";
+        version = "0.2.158";
         edition = "2015";
-        sha256 = "1rsnma7hnw22w7jh9yqg43slddvfbnfzrvm3s7s4kinbj1jvzqqk";
+        sha256 = "0fb4qldw1jrxljrwz6bsjn8lv4rqizlqmab41q3j98q332xw9bfq";
         authors = [
           "The Rust Project Developers"
         ];
@@ -5526,6 +5979,33 @@ rec {
         };
         resolvedDefaultFeatures = [ "default" ];
       };
+      "libmimalloc-sys" = rec {
+        crateName = "libmimalloc-sys";
+        version = "0.1.39";
+        edition = "2018";
+        links = "mimalloc";
+        sha256 = "0i3b0dzz7cp0ik7ys66q92r16va78gwlbrnxhj5fnkdxsc8niai3";
+        libName = "libmimalloc_sys";
+        authors = [
+          "Octavian Oncescu <octavonce@gmail.com>"
+        ];
+        dependencies = [
+          {
+            name = "libc";
+            packageId = "libc";
+          }
+        ];
+        buildDependencies = [
+          {
+            name = "cc";
+            packageId = "cc";
+          }
+        ];
+        features = {
+          "cty" = [ "dep:cty" ];
+          "extended" = [ "cty" ];
+        };
+      };
       "libredox" = rec {
         crateName = "libredox";
         version = "0.0.1";
@@ -5537,7 +6017,7 @@ rec {
         dependencies = [
           {
             name = "bitflags";
-            packageId = "bitflags 2.4.2";
+            packageId = "bitflags 2.6.0";
           }
           {
             name = "libc";
@@ -5556,9 +6036,10 @@ rec {
       };
       "linux-raw-sys" = rec {
         crateName = "linux-raw-sys";
-        version = "0.4.13";
+        version = "0.4.14";
         edition = "2021";
-        sha256 = "172k2c6422gsc914ig8rh99mb9yc7siw6ikc3d9xw1k7vx0s3k81";
+        sha256 = "12gsjgbhhjwywpqcrizv80vrp7p7grsz5laqq773i33wphjsxcvq";
+        libName = "linux_raw_sys";
         authors = [
           "Dan Gohman <dev@sunfishcode.online>"
         ];
@@ -5633,11 +6114,53 @@ rec {
         };
         resolvedDefaultFeatures = [ "std" ];
       };
+      "loom" = rec {
+        crateName = "loom";
+        version = "0.7.2";
+        edition = "2018";
+        sha256 = "1jpszf9qxv8ydpsm2h9vcyvxvyxcfkhmmfbylzd4gfbc0k40v7j1";
+        authors = [
+          "Carl Lerche <me@carllerche.com>"
+        ];
+        dependencies = [
+          {
+            name = "cfg-if";
+            packageId = "cfg-if";
+          }
+          {
+            name = "generator";
+            packageId = "generator";
+          }
+          {
+            name = "scoped-tls";
+            packageId = "scoped-tls";
+          }
+          {
+            name = "tracing";
+            packageId = "tracing";
+            usesDefaultFeatures = false;
+            features = [ "std" ];
+          }
+          {
+            name = "tracing-subscriber";
+            packageId = "tracing-subscriber";
+            features = [ "env-filter" ];
+          }
+        ];
+        features = {
+          "checkpoint" = [ "serde" "serde_json" ];
+          "futures" = [ "pin-utils" ];
+          "pin-utils" = [ "dep:pin-utils" ];
+          "serde" = [ "dep:serde" ];
+          "serde_json" = [ "dep:serde_json" ];
+        };
+        resolvedDefaultFeatures = [ "default" ];
+      };
       "lru" = rec {
         crateName = "lru";
-        version = "0.12.3";
+        version = "0.12.4";
         edition = "2015";
-        sha256 = "1p5hryc967wdh56q9wzb2x9gdqy3yd0sqmnb2fcf7z28wrsjw9nk";
+        sha256 = "017rzh4kyl3j79sj0qc35wallblsqbnkzxpn6i3xkrv02y4kkvip";
         authors = [
           "Jerome Froelich <jeromefroelic@hotmail.com>"
         ];
@@ -5661,6 +6184,7 @@ rec {
         edition = "2018";
         links = "lzma";
         sha256 = "09sxp20waxyglgn3cjz8qjkspb3ryz2fwx4rigkwvrk46ymh9njz";
+        libName = "lzma_sys";
         authors = [
           "Alex Crichton <alex@alexcrichton.com>"
         ];
@@ -5698,7 +6222,7 @@ rec {
         dependencies = [
           {
             name = "bitflags";
-            packageId = "bitflags 2.4.2";
+            packageId = "bitflags 2.6.0";
           }
           {
             name = "libc";
@@ -5722,6 +6246,7 @@ rec {
         edition = "2015";
         links = "magic";
         sha256 = "1g5k9d9igxv4h23nbhp8bqa5gdpkd3ahgm0rh5i0s54mi3h6my7g";
+        libName = "magic_sys";
         authors = [
           "robo9k <robo9k@symlink.io>"
         ];
@@ -5772,7 +6297,7 @@ rec {
         ];
 
       };
-      "matchit" = rec {
+      "matchit 0.7.3" = rec {
         crateName = "matchit";
         version = "0.7.3";
         edition = "2021";
@@ -5783,6 +6308,17 @@ rec {
         features = { };
         resolvedDefaultFeatures = [ "default" ];
       };
+      "matchit 0.8.4" = rec {
+        crateName = "matchit";
+        version = "0.8.4";
+        edition = "2021";
+        sha256 = "1hzl48fwq1cn5dvshfly6vzkzqhfihya65zpj7nz7lfx82mgzqa7";
+        authors = [
+          "Ibraheem Ahmed <ibraheem@ibraheem.ca>"
+        ];
+        features = { };
+        resolvedDefaultFeatures = [ "default" ];
+      };
       "md-5" = rec {
         crateName = "md-5";
         version = "0.10.6";
@@ -5872,6 +6408,34 @@ rec {
         features = { };
         resolvedDefaultFeatures = [ "default" ];
       };
+      "mimalloc" = rec {
+        crateName = "mimalloc";
+        version = "0.1.43";
+        edition = "2018";
+        sha256 = "0csnyrxc16i592gm5ffham07jyj2w98qsh9jyy1rv59lmr8474b8";
+        authors = [
+          "Octavian Oncescu <octavonce@gmail.com>"
+          "Vincent Rouillรฉ <vincent@speedy37.fr>"
+          "Thom Chiovoloni <chiovolonit@gmail.com>"
+        ];
+        dependencies = [
+          {
+            name = "libmimalloc-sys";
+            packageId = "libmimalloc-sys";
+            usesDefaultFeatures = false;
+          }
+        ];
+        features = {
+          "debug" = [ "libmimalloc-sys/debug" ];
+          "debug_in_debug" = [ "libmimalloc-sys/debug_in_debug" ];
+          "extended" = [ "libmimalloc-sys/extended" ];
+          "local_dynamic_tls" = [ "libmimalloc-sys/local_dynamic_tls" ];
+          "no_thp" = [ "libmimalloc-sys/no_thp" ];
+          "override" = [ "libmimalloc-sys/override" ];
+          "secure" = [ "libmimalloc-sys/secure" ];
+        };
+        resolvedDefaultFeatures = [ "default" ];
+      };
       "mime" = rec {
         crateName = "mime";
         version = "0.3.17";
@@ -5887,6 +6451,7 @@ rec {
         version = "0.2.1";
         edition = "2018";
         sha256 = "16ppc5g84aijpri4jzv14rvcnslvlpphbszc7zzp6vfkddf4qdb8";
+        libName = "minimal_lexical";
         authors = [
           "Alex Huszagh <ahuszagh@gmail.com>"
         ];
@@ -5922,7 +6487,7 @@ rec {
         };
         resolvedDefaultFeatures = [ "with-alloc" ];
       };
-      "mio" = rec {
+      "mio 0.8.11" = rec {
         crateName = "mio";
         version = "0.8.11";
         edition = "2018";
@@ -5965,7 +6530,53 @@ rec {
           "log" = [ "dep:log" ];
           "os-ext" = [ "os-poll" "windows-sys/Win32_System_Pipes" "windows-sys/Win32_Security" ];
         };
-        resolvedDefaultFeatures = [ "default" "log" "net" "os-ext" "os-poll" ];
+        resolvedDefaultFeatures = [ "default" "log" "os-ext" "os-poll" ];
+      };
+      "mio 1.0.2" = rec {
+        crateName = "mio";
+        version = "1.0.2";
+        edition = "2021";
+        sha256 = "1v1cnnn44awxbcfm4zlavwgkvbyg7gp5zzjm8mqf1apkrwflvq40";
+        authors = [
+          "Carl Lerche <me@carllerche.com>"
+          "Thomas de Zeeuw <thomasdezeeuw@gmail.com>"
+          "Tokio Contributors <team@tokio.rs>"
+        ];
+        dependencies = [
+          {
+            name = "hermit-abi";
+            packageId = "hermit-abi";
+            rename = "libc";
+            target = { target, features }: ("hermit" == target."os" or null);
+          }
+          {
+            name = "libc";
+            packageId = "libc";
+            target = { target, features }: ("wasi" == target."os" or null);
+          }
+          {
+            name = "libc";
+            packageId = "libc";
+            target = { target, features }: (target."unix" or false);
+          }
+          {
+            name = "wasi";
+            packageId = "wasi";
+            target = { target, features }: ("wasi" == target."os" or null);
+          }
+          {
+            name = "windows-sys";
+            packageId = "windows-sys 0.52.0";
+            target = { target, features }: (target."windows" or false);
+            features = [ "Wdk_Foundation" "Wdk_Storage_FileSystem" "Wdk_System_IO" "Win32_Foundation" "Win32_Networking_WinSock" "Win32_Storage_FileSystem" "Win32_System_IO" "Win32_System_WindowsProgramming" ];
+          }
+        ];
+        features = {
+          "default" = [ "log" ];
+          "log" = [ "dep:log" ];
+          "os-ext" = [ "os-poll" "windows-sys/Win32_System_Pipes" "windows-sys/Win32_Security" ];
+        };
+        resolvedDefaultFeatures = [ "net" "os-ext" "os-poll" ];
       };
       "multimap" = rec {
         crateName = "multimap";
@@ -5981,6 +6592,156 @@ rec {
           "serde_impl" = [ "serde" ];
         };
       };
+      "nar-bridge" = rec {
+        crateName = "nar-bridge";
+        version = "0.1.0";
+        edition = "2021";
+        crateBin = [
+          {
+            name = "nar-bridge";
+            path = "src/bin/nar-bridge.rs";
+            requiredFeatures = [ ];
+          }
+        ];
+        src = lib.cleanSourceWith { filter = sourceFilter; src = ./nar-bridge; };
+        libName = "nar_bridge";
+        dependencies = [
+          {
+            name = "axum";
+            packageId = "axum";
+            features = [ "http2" ];
+          }
+          {
+            name = "bytes";
+            packageId = "bytes";
+          }
+          {
+            name = "clap";
+            packageId = "clap";
+            features = [ "derive" "env" ];
+          }
+          {
+            name = "data-encoding";
+            packageId = "data-encoding";
+          }
+          {
+            name = "futures";
+            packageId = "futures";
+          }
+          {
+            name = "itertools";
+            packageId = "itertools 0.12.1";
+          }
+          {
+            name = "lru";
+            packageId = "lru";
+          }
+          {
+            name = "mimalloc";
+            packageId = "mimalloc";
+          }
+          {
+            name = "nix-compat";
+            packageId = "nix-compat";
+            features = [ "async" ];
+          }
+          {
+            name = "parking_lot";
+            packageId = "parking_lot 0.12.3";
+          }
+          {
+            name = "prost";
+            packageId = "prost";
+          }
+          {
+            name = "serde";
+            packageId = "serde";
+            features = [ "derive" ];
+          }
+          {
+            name = "thiserror";
+            packageId = "thiserror";
+          }
+          {
+            name = "tokio";
+            packageId = "tokio";
+          }
+          {
+            name = "tokio-listener";
+            packageId = "tokio-listener";
+            features = [ "axum07" "clap" "multi-listener" "sd_listen" ];
+          }
+          {
+            name = "tokio-util";
+            packageId = "tokio-util";
+            features = [ "io" "io-util" "compat" ];
+          }
+          {
+            name = "tonic";
+            packageId = "tonic";
+            features = [ "tls" "tls-roots" ];
+          }
+          {
+            name = "tower";
+            packageId = "tower";
+          }
+          {
+            name = "tower-http";
+            packageId = "tower-http";
+            features = [ "trace" ];
+          }
+          {
+            name = "tracing";
+            packageId = "tracing";
+          }
+          {
+            name = "tracing-subscriber";
+            packageId = "tracing-subscriber";
+          }
+          {
+            name = "tvix-castore";
+            packageId = "tvix-castore";
+          }
+          {
+            name = "tvix-store";
+            packageId = "tvix-store";
+          }
+          {
+            name = "tvix-tracing";
+            packageId = "tvix-tracing";
+            features = [ "tonic" "axum" ];
+          }
+          {
+            name = "url";
+            packageId = "url";
+          }
+        ];
+        buildDependencies = [
+          {
+            name = "prost-build";
+            packageId = "prost-build";
+          }
+          {
+            name = "tonic-build";
+            packageId = "tonic-build";
+          }
+        ];
+        devDependencies = [
+          {
+            name = "hex-literal";
+            packageId = "hex-literal";
+          }
+          {
+            name = "rstest";
+            packageId = "rstest";
+          }
+        ];
+        features = {
+          "default" = [ "otlp" ];
+          "otlp" = [ "tvix-tracing/otlp" ];
+        };
+        resolvedDefaultFeatures = [ "default" "otlp" ];
+      };
       "nibble_vec" = rec {
         crateName = "nibble_vec";
         version = "0.1.0";
@@ -6144,7 +6905,7 @@ rec {
         dependencies = [
           {
             name = "bitflags";
-            packageId = "bitflags 2.4.2";
+            packageId = "bitflags 2.6.0";
           }
           {
             name = "cfg-if";
@@ -6185,16 +6946,12 @@ rec {
             requiredFeatures = [ ];
           }
         ];
-        # We can't filter paths with references in Nix 2.4
-        # See https://github.com/NixOS/nix/issues/5410
-        src =
-          if ((lib.versionOlder builtins.nixVersion "2.4pre20211007") || (lib.versionOlder "2.5" builtins.nixVersion))
-          then lib.cleanSourceWith { filter = sourceFilter; src = ./nix-compat; }
-          else ./nix-compat;
+        src = lib.cleanSourceWith { filter = sourceFilter; src = ./nix-compat; };
+        libName = "nix_compat";
         dependencies = [
           {
             name = "bitflags";
-            packageId = "bitflags 2.4.2";
+            packageId = "bitflags 2.6.0";
           }
           {
             name = "bstr";
@@ -6202,6 +6959,11 @@ rec {
             features = [ "alloc" "unicode" "serde" ];
           }
           {
+            name = "bytes";
+            packageId = "bytes";
+            optional = true;
+          }
+          {
             name = "data-encoding";
             packageId = "data-encoding";
           }
@@ -6222,6 +6984,15 @@ rec {
             packageId = "glob";
           }
           {
+            name = "mimalloc";
+            packageId = "mimalloc";
+          }
+          {
+            name = "nix-compat-derive";
+            packageId = "nix-compat-derive";
+            optional = true;
+          }
+          {
             name = "nom";
             packageId = "nom";
           }
@@ -6257,6 +7028,10 @@ rec {
             optional = true;
             features = [ "io-util" "macros" ];
           }
+          {
+            name = "tracing";
+            packageId = "tracing";
+          }
         ];
         devDependencies = [
           {
@@ -6267,8 +7042,6 @@ rec {
           {
             name = "futures";
             packageId = "futures";
-            usesDefaultFeatures = false;
-            features = [ "executor" ];
           }
           {
             name = "hex-literal";
@@ -6279,6 +7052,10 @@ rec {
             packageId = "lazy_static";
           }
           {
+            name = "mimalloc";
+            packageId = "mimalloc";
+          }
+          {
             name = "pretty_assertions";
             packageId = "pretty_assertions";
           }
@@ -6291,6 +7068,10 @@ rec {
             packageId = "serde_json";
           }
           {
+            name = "smol_str";
+            packageId = "smol_str";
+          }
+          {
             name = "tokio-test";
             packageId = "tokio-test";
           }
@@ -6301,12 +7082,127 @@ rec {
         ];
         features = {
           "async" = [ "tokio" ];
-          "default" = [ "async" "wire" ];
+          "bytes" = [ "dep:bytes" ];
+          "default" = [ "async" "wire" "nix-compat-derive" ];
+          "nix-compat-derive" = [ "dep:nix-compat-derive" ];
           "pin-project-lite" = [ "dep:pin-project-lite" ];
           "tokio" = [ "dep:tokio" ];
-          "wire" = [ "tokio" "pin-project-lite" ];
+          "wire" = [ "tokio" "pin-project-lite" "bytes" ];
         };
-        resolvedDefaultFeatures = [ "async" "default" "pin-project-lite" "tokio" "wire" ];
+        resolvedDefaultFeatures = [ "async" "bytes" "default" "nix-compat-derive" "pin-project-lite" "test" "tokio" "wire" ];
+      };
+      "nix-compat-derive" = rec {
+        crateName = "nix-compat-derive";
+        version = "0.1.0";
+        edition = "2021";
+        src = lib.cleanSourceWith { filter = sourceFilter; src = ./nix-compat-derive; };
+        procMacro = true;
+        libName = "nix_compat_derive";
+        dependencies = [
+          {
+            name = "proc-macro2";
+            packageId = "proc-macro2";
+            features = [ "proc-macro" ];
+          }
+          {
+            name = "quote";
+            packageId = "quote";
+            features = [ "proc-macro" ];
+          }
+          {
+            name = "syn";
+            packageId = "syn 2.0.76";
+            features = [ "full" "extra-traits" ];
+          }
+        ];
+        devDependencies = [
+          {
+            name = "hex-literal";
+            packageId = "hex-literal";
+          }
+          {
+            name = "nix-compat";
+            packageId = "nix-compat";
+            usesDefaultFeatures = false;
+            features = [ "async" "wire" "test" ];
+          }
+          {
+            name = "pretty_assertions";
+            packageId = "pretty_assertions";
+          }
+          {
+            name = "rstest";
+            packageId = "rstest";
+          }
+          {
+            name = "tokio";
+            packageId = "tokio";
+            features = [ "io-util" "macros" ];
+          }
+          {
+            name = "tokio-test";
+            packageId = "tokio-test";
+          }
+        ];
+
+      };
+      "nix-compat-derive-tests" = rec {
+        crateName = "nix-compat-derive-tests";
+        version = "0.1.0";
+        edition = "2021";
+        src = lib.cleanSourceWith { filter = sourceFilter; src = ./nix-compat-derive-tests; };
+        devDependencies = [
+          {
+            name = "hex-literal";
+            packageId = "hex-literal";
+          }
+          {
+            name = "nix-compat";
+            packageId = "nix-compat";
+            features = [ "test" "wire" ];
+          }
+          {
+            name = "nix-compat-derive";
+            packageId = "nix-compat-derive";
+          }
+          {
+            name = "pretty_assertions";
+            packageId = "pretty_assertions";
+          }
+          {
+            name = "rstest";
+            packageId = "rstest";
+          }
+          {
+            name = "tokio";
+            packageId = "tokio";
+            features = [ "io-util" "macros" ];
+          }
+          {
+            name = "tokio-test";
+            packageId = "tokio-test";
+          }
+          {
+            name = "trybuild";
+            packageId = "trybuild";
+          }
+        ];
+        features = { };
+        resolvedDefaultFeatures = [ "compile-tests" ];
+      };
+      "nohash-hasher" = rec {
+        crateName = "nohash-hasher";
+        version = "0.2.0";
+        edition = "2018";
+        sha256 = "0lf4p6k01w4wm7zn4grnihzj8s7zd5qczjmzng7wviwxawih5x9b";
+        libName = "nohash_hasher";
+        authors = [
+          "Parity Technologies <admin@parity.io>"
+        ];
+        features = {
+          "default" = [ "std" ];
+        };
+        resolvedDefaultFeatures = [ "default" "std" ];
       };
       "nom" = rec {
         crateName = "nom";
@@ -6358,6 +7254,7 @@ rec {
         version = "0.46.0";
         edition = "2018";
         sha256 = "115sywxh53p190lyw97alm14nc004qj5jm5lvdj608z84rbida3p";
+        libName = "nu_ansi_term";
         authors = [
           "ogham@bsago.me"
           "Ryan Scheel (Havvy) <ryan.havvy@gmail.com>"
@@ -6386,6 +7283,7 @@ rec {
         version = "0.1.0";
         edition = "2021";
         sha256 = "1ndiyg82q73783jq18isi71a7mjh56wxrk52rlvyx0mi5z9ibmai";
+        libName = "num_conv";
         authors = [
           "Jacob Pratt <jacob@jhpratt.dev>"
         ];
@@ -6393,9 +7291,10 @@ rec {
       };
       "num-traits" = rec {
         crateName = "num-traits";
-        version = "0.2.18";
-        edition = "2018";
-        sha256 = "0yjib8p2p9kzmaz48xwhs69w5dh1wipph9jgnillzd2x33jz03fs";
+        version = "0.2.19";
+        edition = "2021";
+        sha256 = "0h984rhdkkqd4ny9cif7y2azl3xdfb7768hb9irhpsch4q3gq787";
+        libName = "num_traits";
         authors = [
           "The Rust Project Developers"
         ];
@@ -6440,6 +7339,19 @@ rec {
         ];
 
       };
+      "number_prefix" = rec {
+        crateName = "number_prefix";
+        version = "0.4.0";
+        edition = "2015";
+        sha256 = "1wvh13wvlajqxkb1filsfzbrnq0vrmrw298v2j3sy82z1rm282w3";
+        authors = [
+          "Benjamin Sago <ogham@bsago.me>"
+        ];
+        features = {
+          "default" = [ "std" ];
+        };
+        resolvedDefaultFeatures = [ "default" "std" ];
+      };
       "object" = rec {
         crateName = "object";
         version = "0.32.2";
@@ -6474,9 +7386,9 @@ rec {
       };
       "object_store" = rec {
         crateName = "object_store";
-        version = "0.9.1";
+        version = "0.10.2";
         edition = "2021";
-        sha256 = "1cwx0xg57cp3z6xjgrqwp0gxgxsagls4h5cd212pmxpxcn5qywdq";
+        sha256 = "1wz3m20hqs3v93dyxcqy7qpsbd4rqp6050hy49wcw5f740l4bnp6";
         dependencies = [
           {
             name = "async-trait";
@@ -6484,7 +7396,7 @@ rec {
           }
           {
             name = "base64";
-            packageId = "base64";
+            packageId = "base64 0.22.1";
             optional = true;
             usesDefaultFeatures = false;
             features = [ "std" ];
@@ -6515,7 +7427,7 @@ rec {
           }
           {
             name = "itertools";
-            packageId = "itertools 0.12.0";
+            packageId = "itertools 0.13.0";
           }
           {
             name = "md-5";
@@ -6525,7 +7437,7 @@ rec {
           }
           {
             name = "parking_lot";
-            packageId = "parking_lot 0.12.2";
+            packageId = "parking_lot 0.12.3";
           }
           {
             name = "percent-encoding";
@@ -6549,7 +7461,7 @@ rec {
             packageId = "reqwest";
             optional = true;
             usesDefaultFeatures = false;
-            features = [ "rustls-tls-native-roots" ];
+            features = [ "rustls-tls-native-roots" "http2" ];
           }
           {
             name = "ring";
@@ -6560,7 +7472,7 @@ rec {
           }
           {
             name = "rustls-pemfile";
-            packageId = "rustls-pemfile 2.1.0";
+            packageId = "rustls-pemfile";
             optional = true;
             usesDefaultFeatures = false;
             features = [ "std" ];
@@ -6665,12 +7577,13 @@ rec {
         version = "0.1.5";
         edition = "2015";
         sha256 = "1kq18qm48rvkwgcggfkqq6pm948190czqc94d6bm2sir5hq1l0gz";
+        libName = "openssl_probe";
         authors = [
           "Alex Crichton <alex@alexcrichton.com>"
         ];
 
       };
-      "opentelemetry" = rec {
+      "opentelemetry 0.22.0" = rec {
         crateName = "opentelemetry";
         version = "0.22.0";
         edition = "2021";
@@ -6717,11 +7630,89 @@ rec {
         };
         resolvedDefaultFeatures = [ "default" "metrics" "pin-project-lite" "trace" ];
       };
+      "opentelemetry 0.24.0" = rec {
+        crateName = "opentelemetry";
+        version = "0.24.0";
+        edition = "2021";
+        sha256 = "15msgya5nandw9chxdr76k7sj9kg6gqj9dyfzrz5pxf4xrimldjc";
+        dependencies = [
+          {
+            name = "futures-core";
+            packageId = "futures-core";
+          }
+          {
+            name = "futures-sink";
+            packageId = "futures-sink";
+          }
+          {
+            name = "js-sys";
+            packageId = "js-sys";
+            target = { target, features }: (("wasm32" == target."arch" or null) && (!("wasi" == target."os" or null)));
+          }
+          {
+            name = "once_cell";
+            packageId = "once_cell";
+          }
+          {
+            name = "pin-project-lite";
+            packageId = "pin-project-lite";
+            optional = true;
+          }
+          {
+            name = "thiserror";
+            packageId = "thiserror";
+            usesDefaultFeatures = false;
+          }
+        ];
+        features = {
+          "default" = [ "trace" "metrics" "logs" ];
+          "logs_level_enabled" = [ "logs" ];
+          "pin-project-lite" = [ "dep:pin-project-lite" ];
+          "testing" = [ "trace" "metrics" ];
+          "trace" = [ "pin-project-lite" ];
+        };
+        resolvedDefaultFeatures = [ "default" "logs" "metrics" "pin-project-lite" "trace" ];
+      };
+      "opentelemetry-http" = rec {
+        crateName = "opentelemetry-http";
+        version = "0.13.0";
+        edition = "2021";
+        sha256 = "1avqmyh42apakbkhjij3c9hl0brnq5v37zk4kpxkhdgf8kgfjcdd";
+        libName = "opentelemetry_http";
+        dependencies = [
+          {
+            name = "async-trait";
+            packageId = "async-trait";
+          }
+          {
+            name = "bytes";
+            packageId = "bytes";
+          }
+          {
+            name = "http";
+            packageId = "http";
+            usesDefaultFeatures = false;
+            features = [ "std" ];
+          }
+          {
+            name = "opentelemetry";
+            packageId = "opentelemetry 0.24.0";
+            features = [ "trace" ];
+          }
+        ];
+        features = {
+          "hyper" = [ "dep:http-body-util" "dep:hyper" "dep:hyper-util" "dep:tokio" ];
+          "reqwest" = [ "dep:reqwest" ];
+          "reqwest-rustls" = [ "reqwest" "reqwest/rustls-tls-native-roots" ];
+          "reqwest-rustls-webpki-roots" = [ "reqwest" "reqwest/rustls-tls-webpki-roots" ];
+        };
+      };
       "opentelemetry-otlp" = rec {
         crateName = "opentelemetry-otlp";
-        version = "0.15.0";
+        version = "0.17.0";
         edition = "2021";
-        sha256 = "1jxbi5w4xgwg4gcj0lz4310y926bglw25b2546pkkilmjj6nn08s";
+        sha256 = "09z70ygp6lfcplnwx7cgf3p3fyq2arkvhxhj8avnz4gv5xh5m4kb";
+        libName = "opentelemetry_otlp";
         dependencies = [
           {
             name = "async-trait";
@@ -6736,10 +7727,11 @@ rec {
             packageId = "http";
             optional = true;
             usesDefaultFeatures = false;
+            features = [ "std" ];
           }
           {
             name = "opentelemetry";
-            packageId = "opentelemetry";
+            packageId = "opentelemetry 0.24.0";
             usesDefaultFeatures = false;
           }
           {
@@ -6748,12 +7740,8 @@ rec {
             usesDefaultFeatures = false;
           }
           {
-            name = "opentelemetry-semantic-conventions";
-            packageId = "opentelemetry-semantic-conventions";
-          }
-          {
             name = "opentelemetry_sdk";
-            packageId = "opentelemetry_sdk";
+            packageId = "opentelemetry_sdk 0.24.1";
             usesDefaultFeatures = false;
           }
           {
@@ -6789,10 +7777,11 @@ rec {
           }
         ];
         features = {
-          "default" = [ "grpc-tonic" "trace" ];
+          "default" = [ "grpc-tonic" "trace" "metrics" "logs" ];
           "grpc-tonic" = [ "tonic" "prost" "http" "tokio" "opentelemetry-proto/gen-tonic" ];
           "gzip-tonic" = [ "tonic/gzip" ];
           "http" = [ "dep:http" ];
+          "http-json" = [ "serde_json" "prost" "opentelemetry-http" "opentelemetry-proto/gen-tonic-messages" "opentelemetry-proto/with-serde" "http" "trace" "metrics" ];
           "http-proto" = [ "prost" "opentelemetry-http" "opentelemetry-proto/gen-tonic-messages" "http" "trace" "metrics" ];
           "integration-testing" = [ "tonic" "prost" "tokio/full" "trace" ];
           "logs" = [ "opentelemetry/logs" "opentelemetry_sdk/logs" "opentelemetry-proto/logs" ];
@@ -6802,31 +7791,35 @@ rec {
           "reqwest" = [ "dep:reqwest" ];
           "reqwest-blocking-client" = [ "reqwest/blocking" "opentelemetry-http/reqwest" ];
           "reqwest-client" = [ "reqwest" "opentelemetry-http/reqwest" ];
-          "reqwest-rustls" = [ "reqwest" "reqwest/rustls-tls-native-roots" ];
+          "reqwest-rustls" = [ "reqwest" "opentelemetry-http/reqwest-rustls" ];
+          "reqwest-rustls-webpki-roots" = [ "reqwest" "opentelemetry-http/reqwest-rustls-webpki-roots" ];
           "serde" = [ "dep:serde" ];
-          "serialize" = [ "serde" ];
+          "serde_json" = [ "dep:serde_json" ];
+          "serialize" = [ "serde" "serde_json" ];
           "tls" = [ "tonic/tls" ];
           "tls-roots" = [ "tls" "tonic/tls-roots" ];
+          "tls-webpki-roots" = [ "tls" "tonic/tls-webpki-roots" ];
           "tokio" = [ "dep:tokio" ];
           "tonic" = [ "dep:tonic" ];
           "trace" = [ "opentelemetry/trace" "opentelemetry_sdk/trace" "opentelemetry-proto/trace" ];
         };
-        resolvedDefaultFeatures = [ "default" "grpc-tonic" "http" "prost" "tokio" "tonic" "trace" ];
+        resolvedDefaultFeatures = [ "default" "grpc-tonic" "http" "logs" "metrics" "prost" "tokio" "tonic" "trace" ];
       };
       "opentelemetry-proto" = rec {
         crateName = "opentelemetry-proto";
-        version = "0.5.0";
+        version = "0.7.0";
         edition = "2021";
-        sha256 = "1r5a1k4fryqijhsar36ld806yf82isw11xfnx7d80nwgnv4xv3rs";
+        sha256 = "1nahv1dflvwdgi4c4p7ikd59x0yyivf85w02398q9jgrpwh9zvih";
+        libName = "opentelemetry_proto";
         dependencies = [
           {
             name = "opentelemetry";
-            packageId = "opentelemetry";
+            packageId = "opentelemetry 0.24.0";
             usesDefaultFeatures = false;
           }
           {
             name = "opentelemetry_sdk";
-            packageId = "opentelemetry_sdk";
+            packageId = "opentelemetry_sdk 0.24.1";
             usesDefaultFeatures = false;
           }
           {
@@ -6842,7 +7835,15 @@ rec {
             features = [ "codegen" "prost" ];
           }
         ];
+        devDependencies = [
+          {
+            name = "opentelemetry";
+            packageId = "opentelemetry 0.24.0";
+            features = [ "testing" ];
+          }
+        ];
         features = {
+          "default" = [ "full" ];
           "full" = [ "gen-tonic" "trace" "logs" "metrics" "zpages" "with-serde" ];
           "gen-tonic" = [ "gen-tonic-messages" "tonic/transport" ];
           "gen-tonic-messages" = [ "tonic" "prost" ];
@@ -6852,22 +7853,16 @@ rec {
           "prost" = [ "dep:prost" ];
           "schemars" = [ "dep:schemars" ];
           "serde" = [ "dep:serde" ];
+          "testing" = [ "opentelemetry/testing" ];
           "tonic" = [ "dep:tonic" ];
           "trace" = [ "opentelemetry/trace" "opentelemetry_sdk/trace" ];
           "with-schemars" = [ "schemars" ];
           "with-serde" = [ "serde" "hex" ];
           "zpages" = [ "trace" ];
         };
-        resolvedDefaultFeatures = [ "gen-tonic" "gen-tonic-messages" "prost" "tonic" "trace" ];
+        resolvedDefaultFeatures = [ "gen-tonic" "gen-tonic-messages" "logs" "metrics" "prost" "tonic" "trace" ];
       };
-      "opentelemetry-semantic-conventions" = rec {
-        crateName = "opentelemetry-semantic-conventions";
-        version = "0.14.0";
-        edition = "2021";
-        sha256 = "04197racbkpj75fh9jnwkdznjzv6l2ljpbr8ryfk9f9gqkb5pazr";
-
-      };
-      "opentelemetry_sdk" = rec {
+      "opentelemetry_sdk 0.22.1" = rec {
         crateName = "opentelemetry_sdk";
         version = "0.22.1";
         edition = "2021";
@@ -6908,7 +7903,7 @@ rec {
           }
           {
             name = "opentelemetry";
-            packageId = "opentelemetry";
+            packageId = "opentelemetry 0.22.0";
           }
           {
             name = "ordered-float";
@@ -6931,6 +7926,94 @@ rec {
             packageId = "thiserror";
             usesDefaultFeatures = false;
           }
+        ];
+        features = {
+          "async-std" = [ "dep:async-std" ];
+          "async-trait" = [ "dep:async-trait" ];
+          "crossbeam-channel" = [ "dep:crossbeam-channel" ];
+          "default" = [ "trace" ];
+          "glob" = [ "dep:glob" ];
+          "http" = [ "dep:http" ];
+          "jaeger_remote_sampler" = [ "trace" "opentelemetry-http" "http" "serde" "serde_json" "url" ];
+          "logs" = [ "opentelemetry/logs" "crossbeam-channel" "async-trait" "serde_json" ];
+          "logs_level_enabled" = [ "logs" "opentelemetry/logs_level_enabled" ];
+          "metrics" = [ "opentelemetry/metrics" "glob" "async-trait" ];
+          "opentelemetry-http" = [ "dep:opentelemetry-http" ];
+          "percent-encoding" = [ "dep:percent-encoding" ];
+          "rand" = [ "dep:rand" ];
+          "rt-async-std" = [ "async-std" ];
+          "rt-tokio" = [ "tokio" "tokio-stream" ];
+          "rt-tokio-current-thread" = [ "tokio" "tokio-stream" ];
+          "serde" = [ "dep:serde" ];
+          "serde_json" = [ "dep:serde_json" ];
+          "testing" = [ "opentelemetry/testing" "trace" "metrics" "logs" "rt-async-std" "rt-tokio" "rt-tokio-current-thread" "tokio/macros" "tokio/rt-multi-thread" ];
+          "tokio" = [ "dep:tokio" ];
+          "tokio-stream" = [ "dep:tokio-stream" ];
+          "trace" = [ "opentelemetry/trace" "crossbeam-channel" "rand" "async-trait" "percent-encoding" ];
+          "url" = [ "dep:url" ];
+        };
+        resolvedDefaultFeatures = [ "async-trait" "crossbeam-channel" "glob" "metrics" "percent-encoding" "rand" "trace" ];
+      };
+      "opentelemetry_sdk 0.24.1" = rec {
+        crateName = "opentelemetry_sdk";
+        version = "0.24.1";
+        edition = "2021";
+        sha256 = "1pr8phigsfki77wh79g6vz6flnhgnr09pm18g2hj83y81r4sqbk9";
+        dependencies = [
+          {
+            name = "async-trait";
+            packageId = "async-trait";
+            optional = true;
+          }
+          {
+            name = "futures-channel";
+            packageId = "futures-channel";
+          }
+          {
+            name = "futures-executor";
+            packageId = "futures-executor";
+          }
+          {
+            name = "futures-util";
+            packageId = "futures-util";
+            usesDefaultFeatures = false;
+            features = [ "std" "sink" "async-await-macro" ];
+          }
+          {
+            name = "glob";
+            packageId = "glob";
+            optional = true;
+          }
+          {
+            name = "once_cell";
+            packageId = "once_cell";
+          }
+          {
+            name = "opentelemetry";
+            packageId = "opentelemetry 0.24.0";
+          }
+          {
+            name = "percent-encoding";
+            packageId = "percent-encoding";
+            optional = true;
+          }
+          {
+            name = "rand";
+            packageId = "rand";
+            optional = true;
+            usesDefaultFeatures = false;
+            features = [ "std" "std_rng" "small_rng" ];
+          }
+          {
+            name = "serde_json";
+            packageId = "serde_json";
+            optional = true;
+          }
+          {
+            name = "thiserror";
+            packageId = "thiserror";
+            usesDefaultFeatures = false;
+          }
           {
             name = "tokio";
             packageId = "tokio";
@@ -6947,12 +8030,11 @@ rec {
         features = {
           "async-std" = [ "dep:async-std" ];
           "async-trait" = [ "dep:async-trait" ];
-          "crossbeam-channel" = [ "dep:crossbeam-channel" ];
-          "default" = [ "trace" ];
+          "default" = [ "trace" "metrics" "logs" ];
           "glob" = [ "dep:glob" ];
           "http" = [ "dep:http" ];
           "jaeger_remote_sampler" = [ "trace" "opentelemetry-http" "http" "serde" "serde_json" "url" ];
-          "logs" = [ "opentelemetry/logs" "crossbeam-channel" "async-trait" "serde_json" ];
+          "logs" = [ "opentelemetry/logs" "async-trait" "serde_json" ];
           "logs_level_enabled" = [ "logs" "opentelemetry/logs_level_enabled" ];
           "metrics" = [ "opentelemetry/metrics" "glob" "async-trait" ];
           "opentelemetry-http" = [ "dep:opentelemetry-http" ];
@@ -6966,16 +8048,17 @@ rec {
           "testing" = [ "opentelemetry/testing" "trace" "metrics" "logs" "rt-async-std" "rt-tokio" "rt-tokio-current-thread" "tokio/macros" "tokio/rt-multi-thread" ];
           "tokio" = [ "dep:tokio" ];
           "tokio-stream" = [ "dep:tokio-stream" ];
-          "trace" = [ "opentelemetry/trace" "crossbeam-channel" "rand" "async-trait" "percent-encoding" ];
+          "trace" = [ "opentelemetry/trace" "rand" "async-trait" "percent-encoding" ];
           "url" = [ "dep:url" ];
         };
-        resolvedDefaultFeatures = [ "async-trait" "crossbeam-channel" "default" "glob" "metrics" "percent-encoding" "rand" "rt-tokio" "tokio" "tokio-stream" "trace" ];
+        resolvedDefaultFeatures = [ "async-trait" "default" "glob" "logs" "metrics" "percent-encoding" "rand" "rt-tokio" "serde_json" "tokio" "tokio-stream" "trace" ];
       };
       "ordered-float" = rec {
         crateName = "ordered-float";
         version = "4.2.0";
         edition = "2021";
         sha256 = "0kjqcvvbcsibbx3hnj7ag06bd9gv2zfi5ja6rgyh2kbxbh3zfvd7";
+        libName = "ordered_float";
         authors = [
           "Jonathan Reem <jonathan.reem@gmail.com>"
           "Matt Brubeck <mbrubeck@limpet.net>"
@@ -7087,11 +8170,11 @@ rec {
         };
         resolvedDefaultFeatures = [ "default" ];
       };
-      "parking_lot 0.12.2" = rec {
+      "parking_lot 0.12.3" = rec {
         crateName = "parking_lot";
-        version = "0.12.2";
+        version = "0.12.3";
         edition = "2021";
-        sha256 = "1ys2dzz6cysjmwyivwxczl1ljpcf5cj4qmhdj07d5bkc9z5g0jky";
+        sha256 = "09ws9g6245iiq8z975h8ycf818a66q3c6zv4b5h8skpm7hc1igzi";
         authors = [
           "Amanieu d'Antras <amanieu@gmail.com>"
         ];
@@ -7204,6 +8287,7 @@ rec {
         version = "0.1.0";
         edition = "2015";
         sha256 = "1pcgqxw0mgg3ha5hi5xkjhyjf488bw5rw1g3qlr9awbq4szh3fpc";
+        libName = "path_clean";
         authors = [
           "Dan Reeves <hey@danreev.es>"
         ];
@@ -7214,6 +8298,7 @@ rec {
         version = "2.3.1";
         edition = "2018";
         sha256 = "0gi8wgx0dcy8rnv1kywdv98lwcx67hz0a0zwpib5v2i08r88y573";
+        libName = "percent_encoding";
         authors = [
           "The rust-url developers"
         ];
@@ -7225,9 +8310,9 @@ rec {
       };
       "petgraph" = rec {
         crateName = "petgraph";
-        version = "0.6.4";
+        version = "0.6.5";
         edition = "2018";
-        sha256 = "1ac6wfq5f5pzcv0nvzzfgjbwg2kwslpnzsw5wcmxlscfcb9azlz1";
+        sha256 = "1ns7mbxidnn2pqahbbjccxkrqkrll2i5rbxx43ns6rh6fn3cridl";
         authors = [
           "bluss"
           "mitchmindtree"
@@ -7244,9 +8329,10 @@ rec {
           }
         ];
         features = {
-          "all" = [ "unstable" "quickcheck" "matrix_graph" "stable_graph" "graphmap" ];
+          "all" = [ "unstable" "quickcheck" "matrix_graph" "stable_graph" "graphmap" "rayon" ];
           "default" = [ "graphmap" "stable_graph" "matrix_graph" ];
           "quickcheck" = [ "dep:quickcheck" ];
+          "rayon" = [ "dep:rayon" "indexmap/rayon" ];
           "serde" = [ "dep:serde" ];
           "serde-1" = [ "serde" "serde_derive" ];
           "serde_derive" = [ "dep:serde_derive" ];
@@ -7259,6 +8345,7 @@ rec {
         version = "1.1.3";
         edition = "2021";
         sha256 = "08k4cpy8q3j93qqgnrbzkcgpn7g0a88l4a9nm33kyghpdhffv97x";
+        libName = "pin_project";
         dependencies = [
           {
             name = "pin-project-internal";
@@ -7273,6 +8360,7 @@ rec {
         edition = "2021";
         sha256 = "01a4l3vb84brv9v7wl71chzxra2kynm6yvcjca66xv3ij6fgsna3";
         procMacro = true;
+        libName = "pin_project_internal";
         dependencies = [
           {
             name = "proc-macro2";
@@ -7284,7 +8372,7 @@ rec {
           }
           {
             name = "syn";
-            packageId = "syn 2.0.48";
+            packageId = "syn 2.0.76";
             features = [ "full" "visit-mut" ];
           }
         ];
@@ -7292,9 +8380,10 @@ rec {
       };
       "pin-project-lite" = rec {
         crateName = "pin-project-lite";
-        version = "0.2.13";
+        version = "0.2.14";
         edition = "2018";
-        sha256 = "0n0bwr5qxlf0mhn2xkl36sy55118s9qmvx2yl5f3ixkb007lbywa";
+        sha256 = "00nx3f04agwjlsmd3mc5rx5haibj2v8q9b52b0kwn63wcv4nz9mx";
+        libName = "pin_project_lite";
 
       };
       "pin-utils" = rec {
@@ -7302,6 +8391,7 @@ rec {
         version = "0.1.0";
         edition = "2018";
         sha256 = "117ir7vslsl2z1a7qzhws4pd01cg2d3338c47swjyvqv2n60v1wb";
+        libName = "pin_utils";
         authors = [
           "Josef Brandl <mail@josefbrandl.de>"
         ];
@@ -7381,6 +8471,7 @@ rec {
         version = "0.3.29";
         edition = "2015";
         sha256 = "1jy6158v1316khkpmq2sjj1vgbnbnw51wffx7p0k0l9h9vlys019";
+        libName = "pkg_config";
         authors = [
           "Alex Crichton <alex@alexcrichton.com>"
         ];
@@ -7466,6 +8557,7 @@ rec {
         version = "0.3.5";
         edition = "2018";
         sha256 = "02cn98gsj2i1bwrfsymifmyas1wn2gibdm9mk8w82x9s9n5n4xly";
+        libName = "plotters_backend";
         authors = [
           "Hao Hou <haohou302@gmail.com>"
         ];
@@ -7476,6 +8568,7 @@ rec {
         version = "0.3.5";
         edition = "2018";
         sha256 = "1axbw82frs5di4drbyzihr5j35wpy2a75hp3f49p186cjfcd7xiq";
+        libName = "plotters_svg";
         authors = [
           "Hao Hou <haohou302@gmail.com>"
         ];
@@ -7535,6 +8628,19 @@ rec {
         ];
 
       };
+      "portable-atomic" = rec {
+        crateName = "portable-atomic";
+        version = "1.6.0";
+        edition = "2018";
+        sha256 = "1h77x9qx7pns0d66vdrmdbmwpi7586h7ysnkdnhrn5mwi2cyyw3i";
+        libName = "portable_atomic";
+        features = {
+          "critical-section" = [ "dep:critical-section" ];
+          "default" = [ "fallback" ];
+          "serde" = [ "dep:serde" ];
+        };
+        resolvedDefaultFeatures = [ "default" "fallback" ];
+      };
       "powerfmt" = rec {
         crateName = "powerfmt";
         version = "0.2.0";
@@ -7554,6 +8660,7 @@ rec {
         version = "0.2.17";
         edition = "2018";
         sha256 = "1pp6g52aw970adv3x2310n7glqnji96z0a9wiamzw89ibf0ayh2v";
+        libName = "ppv_lite86";
         authors = [
           "The CryptoCorrosion Contributors"
         ];
@@ -7604,7 +8711,7 @@ rec {
           }
           {
             name = "syn";
-            packageId = "syn 2.0.48";
+            packageId = "syn 2.0.76";
             usesDefaultFeatures = false;
             features = [ "full" ];
           }
@@ -7617,7 +8724,7 @@ rec {
           }
           {
             name = "syn";
-            packageId = "syn 2.0.48";
+            packageId = "syn 2.0.76";
             usesDefaultFeatures = false;
             features = [ "parsing" ];
           }
@@ -7628,9 +8735,10 @@ rec {
       };
       "proc-macro2" = rec {
         crateName = "proc-macro2";
-        version = "1.0.76";
+        version = "1.0.86";
         edition = "2021";
-        sha256 = "136cp0fgl6rg5ljm3b1xpc0bn0lyvagzzmxvbxgk5hxml36mdz4m";
+        sha256 = "0xrv22p8lqlfdf1w0pj4si8n2ws4aw0kilmziwf0vpv5ys6rwway";
+        libName = "proc_macro2";
         authors = [
           "David Tolnay <dtolnay@gmail.com>"
           "Alex Crichton <alex@alexcrichton.com>"
@@ -7648,26 +8756,16 @@ rec {
       };
       "proptest" = rec {
         crateName = "proptest";
-        version = "1.4.0";
+        version = "1.5.0";
         edition = "2018";
-        sha256 = "1gzmw40pgmwzb7x6jsyr88z5w151snv5rp1g0dlcp1iw3h9pdd1i";
+        sha256 = "13gm7mphs95cw4gbgk5qiczkmr68dvcwhp58gmiz33dq2ccm3hml";
         authors = [
           "Jason Lingle"
         ];
         dependencies = [
           {
-            name = "bit-set";
-            packageId = "bit-set";
-            optional = true;
-          }
-          {
-            name = "bit-vec";
-            packageId = "bit-vec";
-            optional = true;
-          }
-          {
             name = "bitflags";
-            packageId = "bitflags 2.4.2";
+            packageId = "bitflags 2.6.0";
           }
           {
             name = "lazy_static";
@@ -7701,12 +8799,6 @@ rec {
             optional = true;
           }
           {
-            name = "rusty-fork";
-            packageId = "rusty-fork";
-            optional = true;
-            usesDefaultFeatures = false;
-          }
-          {
             name = "tempfile";
             packageId = "tempfile";
             optional = true;
@@ -7717,12 +8809,14 @@ rec {
           }
         ];
         features = {
+          "attr-macro" = [ "proptest-macro" ];
           "bit-set" = [ "dep:bit-set" "dep:bit-vec" ];
           "default" = [ "std" "fork" "timeout" "bit-set" ];
           "default-code-coverage" = [ "std" "fork" "timeout" "bit-set" ];
           "fork" = [ "std" "rusty-fork" "tempfile" ];
           "hardware-rng" = [ "x86" ];
           "lazy_static" = [ "dep:lazy_static" ];
+          "proptest-macro" = [ "dep:proptest-macro" ];
           "regex-syntax" = [ "dep:regex-syntax" ];
           "rusty-fork" = [ "dep:rusty-fork" ];
           "std" = [ "rand/std" "lazy_static" "regex-syntax" "num-traits/std" ];
@@ -7730,16 +8824,17 @@ rec {
           "timeout" = [ "fork" "rusty-fork/timeout" ];
           "x86" = [ "dep:x86" ];
         };
-        resolvedDefaultFeatures = [ "alloc" "bit-set" "default" "fork" "lazy_static" "regex-syntax" "rusty-fork" "std" "tempfile" "timeout" ];
+        resolvedDefaultFeatures = [ "alloc" "lazy_static" "regex-syntax" "std" "tempfile" ];
       };
       "prost" = rec {
         crateName = "prost";
-        version = "0.12.3";
+        version = "0.13.1";
         edition = "2021";
-        sha256 = "0jmrhlb4jkiylz72xb14vlkfbmlq0jwv7j20ini9harhvaf2hv0l";
+        sha256 = "1k1k4g4is0h80c648scs4spyi9r7pi1xid264hgcd276zp9v6gg1";
         authors = [
           "Dan Burkert <dan@danburkert.com>"
-          "Lucio Franco <luciofranco14@gmail.com"
+          "Lucio Franco <luciofranco14@gmail.com>"
+          "Casper Meijn <casper@meijn.net>"
           "Tokio Contributors <team@tokio.rs>"
         ];
         dependencies = [
@@ -7755,19 +8850,22 @@ rec {
           }
         ];
         features = {
-          "default" = [ "prost-derive" "std" ];
-          "prost-derive" = [ "dep:prost-derive" ];
+          "default" = [ "derive" "std" ];
+          "derive" = [ "dep:prost-derive" ];
+          "prost-derive" = [ "derive" ];
         };
-        resolvedDefaultFeatures = [ "default" "prost-derive" "std" ];
+        resolvedDefaultFeatures = [ "default" "derive" "prost-derive" "std" ];
       };
       "prost-build" = rec {
         crateName = "prost-build";
-        version = "0.12.3";
+        version = "0.13.1";
         edition = "2021";
-        sha256 = "1lp2l1l65l163yggk9nw5mjb2fqwzz12693af5phn1v0abih4pn5";
+        sha256 = "1hbp7cam99q56qf2wp8inwfh59adkzgf676hi07hgpbi1xc85cav";
+        libName = "prost_build";
         authors = [
           "Dan Burkert <dan@danburkert.com>"
           "Lucio Franco <luciofranco14@gmail.com>"
+          "Casper Meijn <casper@meijn.net>"
           "Tokio Contributors <team@tokio.rs>"
         ];
         dependencies = [
@@ -7778,11 +8876,11 @@ rec {
           }
           {
             name = "heck";
-            packageId = "heck";
+            packageId = "heck 0.4.1";
           }
           {
             name = "itertools";
-            packageId = "itertools 0.11.0";
+            packageId = "itertools 0.10.5";
             usesDefaultFeatures = false;
             features = [ "use_alloc" ];
           }
@@ -7838,7 +8936,7 @@ rec {
           }
           {
             name = "syn";
-            packageId = "syn 2.0.48";
+            packageId = "syn 2.0.76";
             optional = true;
             features = [ "full" ];
           }
@@ -7846,31 +8944,25 @@ rec {
             name = "tempfile";
             packageId = "tempfile";
           }
-          {
-            name = "which";
-            packageId = "which 4.4.2";
-          }
         ];
         features = {
-          "cleanup-markdown" = [ "pulldown-cmark" "pulldown-cmark-to-cmark" ];
+          "cleanup-markdown" = [ "dep:pulldown-cmark" "dep:pulldown-cmark-to-cmark" ];
           "default" = [ "format" ];
-          "format" = [ "prettyplease" "syn" ];
-          "prettyplease" = [ "dep:prettyplease" ];
-          "pulldown-cmark" = [ "dep:pulldown-cmark" ];
-          "pulldown-cmark-to-cmark" = [ "dep:pulldown-cmark-to-cmark" ];
-          "syn" = [ "dep:syn" ];
+          "format" = [ "dep:prettyplease" "dep:syn" ];
         };
-        resolvedDefaultFeatures = [ "cleanup-markdown" "default" "format" "prettyplease" "pulldown-cmark" "pulldown-cmark-to-cmark" "syn" ];
+        resolvedDefaultFeatures = [ "cleanup-markdown" "default" "format" ];
       };
       "prost-derive" = rec {
         crateName = "prost-derive";
-        version = "0.12.3";
+        version = "0.13.1";
         edition = "2021";
-        sha256 = "03l4yf6pdjvc4sgbvln2srq1avzm1ai86zni4hhqxvqxvnhwkdpg";
+        sha256 = "1jng0kwkwiih777f77kn6ja7hdvi7sxbg11nndwagsy4mnqckghq";
         procMacro = true;
+        libName = "prost_derive";
         authors = [
           "Dan Burkert <dan@danburkert.com>"
           "Lucio Franco <luciofranco14@gmail.com>"
+          "Casper Meijn <casper@meijn.net>"
           "Tokio Contributors <team@tokio.rs>"
         ];
         dependencies = [
@@ -7880,9 +8972,7 @@ rec {
           }
           {
             name = "itertools";
-            packageId = "itertools 0.11.0";
-            usesDefaultFeatures = false;
-            features = [ "use_alloc" ];
+            packageId = "itertools 0.10.5";
           }
           {
             name = "proc-macro2";
@@ -7894,7 +8984,7 @@ rec {
           }
           {
             name = "syn";
-            packageId = "syn 2.0.48";
+            packageId = "syn 2.0.76";
             features = [ "extra-traits" ];
           }
         ];
@@ -7902,12 +8992,14 @@ rec {
       };
       "prost-types" = rec {
         crateName = "prost-types";
-        version = "0.12.3";
+        version = "0.13.1";
         edition = "2021";
-        sha256 = "03j73llzljdxv9cdxp4m3vb9j3gh4y24rkbx48k3rx6wkvsrhf0r";
+        sha256 = "1hjww9k35c4fqjd75ziyjvyzl8kv9aqnw841ll64p7gl0n5idrff";
+        libName = "prost_types";
         authors = [
           "Dan Burkert <dan@danburkert.com>"
-          "Lucio Franco <luciofranco14@gmail.com"
+          "Lucio Franco <luciofranco14@gmail.com>"
+          "Casper Meijn <casper@meijn.net>"
           "Tokio Contributors <team@tokio.rs>"
         ];
         dependencies = [
@@ -7926,9 +9018,10 @@ rec {
       };
       "prost-wkt" = rec {
         crateName = "prost-wkt";
-        version = "0.5.0";
+        version = "0.6.0";
         edition = "2021";
-        sha256 = "0h3c0jyfpg7f3s9a3mx6xcif28j3ir5c5qmps88bknpiy31zk3jd";
+        sha256 = "16c2mbaq2hff51kwr204fncnmi0qx2zz4ff3pb1086qqxqmlxn58";
+        libName = "prost_wkt";
         authors = [
           "fdeantoni <fdeantoni@gmail.com>"
         ];
@@ -7968,16 +9061,17 @@ rec {
       };
       "prost-wkt-build" = rec {
         crateName = "prost-wkt-build";
-        version = "0.5.0";
+        version = "0.6.0";
         edition = "2021";
-        sha256 = "0883g26vrhx07kv0dq85559pj95zxs10lx042pp4za2clplwlcav";
+        sha256 = "054v5qqvdv29g5s1kr26zv0yvzc8gmnlx9k2dw6026g7rdd9srla";
+        libName = "prost_wkt_build";
         authors = [
           "fdeantoni <fdeantoni@gmail.com>"
         ];
         dependencies = [
           {
             name = "heck";
-            packageId = "heck";
+            packageId = "heck 0.4.1";
           }
           {
             name = "prost";
@@ -8000,9 +9094,10 @@ rec {
       };
       "prost-wkt-types" = rec {
         crateName = "prost-wkt-types";
-        version = "0.5.0";
+        version = "0.6.0";
         edition = "2021";
-        sha256 = "1vipmgvqqzr3hn9z5v85mx9zznzjwyfpjy8xzg2v94a0f2lf8ns3";
+        sha256 = "0r2gxf5b604b00v1fwif1rn5nm5xk4vb3ri29dhm9rl2kf70dvq1";
+        libName = "prost_wkt_types";
         authors = [
           "fdeantoni <fdeantoni@gmail.com>"
         ];
@@ -8071,6 +9166,7 @@ rec {
         edition = "2021";
         crateBin = [ ];
         sha256 = "0av876a31qvqhy7gzdg134zn4s10smlyi744mz9vrllkf906n82p";
+        libName = "pulldown_cmark";
         authors = [
           "Raph Levien <raph.levien@gmail.com>"
           "Marcus Klaas de Vries <mail@marcusklaas.nl>"
@@ -8078,7 +9174,7 @@ rec {
         dependencies = [
           {
             name = "bitflags";
-            packageId = "bitflags 2.4.2";
+            packageId = "bitflags 2.6.0";
           }
           {
             name = "memchr";
@@ -8100,6 +9196,7 @@ rec {
         version = "10.0.4";
         edition = "2018";
         sha256 = "0gc366cmd5jxal9m95l17rvqsm4dn62lywc8v5gwq8vcjvhyd501";
+        libName = "pulldown_cmark_to_cmark";
         authors = [
           "Sebastian Thiel <byronimo@gmail.com>"
           "Dylan Owen <dyltotheo@gmail.com>"
@@ -8115,22 +9212,12 @@ rec {
         ];
 
       };
-      "quick-error" = rec {
-        crateName = "quick-error";
-        version = "1.2.3";
-        edition = "2015";
-        sha256 = "1q6za3v78hsspisc197bg3g7rpc989qycy8ypr8ap8igv10ikl51";
-        authors = [
-          "Paul Colomiets <paul@colomiets.name>"
-          "Colin Kiegel <kiegel@gmx.de>"
-        ];
-
-      };
       "quick-xml" = rec {
         crateName = "quick-xml";
-        version = "0.31.0";
+        version = "0.36.1";
         edition = "2021";
-        sha256 = "0cravqanylzh5cq2v6hzlfqgxcid5nrp2snnb3pf4m0and2a610h";
+        sha256 = "1g0p8h4hxz2ymiyd71mn862nrj7s0g1wwiyahhdabpzxiqp5x84n";
+        libName = "quick_xml";
         dependencies = [
           {
             name = "memchr";
@@ -8155,11 +9242,180 @@ rec {
         };
         resolvedDefaultFeatures = [ "default" "overlapped-lists" "serde" "serialize" ];
       };
+      "quinn" = rec {
+        crateName = "quinn";
+        version = "0.11.2";
+        edition = "2021";
+        sha256 = "1bdkir6irdxld7xq2zd58scmm44w3vx1zswq6x0mfy6fpbmfxkp4";
+        dependencies = [
+          {
+            name = "bytes";
+            packageId = "bytes";
+          }
+          {
+            name = "pin-project-lite";
+            packageId = "pin-project-lite";
+          }
+          {
+            name = "quinn-proto";
+            packageId = "quinn-proto";
+            rename = "proto";
+            usesDefaultFeatures = false;
+          }
+          {
+            name = "quinn-udp";
+            packageId = "quinn-udp";
+            rename = "udp";
+            usesDefaultFeatures = false;
+          }
+          {
+            name = "rustc-hash";
+            packageId = "rustc-hash 1.1.0";
+          }
+          {
+            name = "rustls";
+            packageId = "rustls";
+            optional = true;
+            usesDefaultFeatures = false;
+            features = [ "ring" "std" ];
+          }
+          {
+            name = "thiserror";
+            packageId = "thiserror";
+          }
+          {
+            name = "tokio";
+            packageId = "tokio";
+            features = [ "sync" ];
+          }
+          {
+            name = "tracing";
+            packageId = "tracing";
+          }
+        ];
+        devDependencies = [
+          {
+            name = "tokio";
+            packageId = "tokio";
+            features = [ "rt" "rt-multi-thread" "time" "macros" "sync" ];
+          }
+        ];
+        features = {
+          "async-io" = [ "dep:async-io" ];
+          "async-std" = [ "dep:async-std" ];
+          "default" = [ "log" "platform-verifier" "ring" "runtime-tokio" "rustls" ];
+          "futures-io" = [ "dep:futures-io" ];
+          "log" = [ "tracing/log" "proto/log" "udp/log" ];
+          "platform-verifier" = [ "proto/platform-verifier" ];
+          "ring" = [ "proto/ring" ];
+          "runtime-async-std" = [ "async-io" "async-std" ];
+          "runtime-smol" = [ "async-io" "smol" ];
+          "runtime-tokio" = [ "tokio/time" "tokio/rt" "tokio/net" ];
+          "rustls" = [ "dep:rustls" "proto/rustls" "proto/ring" ];
+          "smol" = [ "dep:smol" ];
+        };
+        resolvedDefaultFeatures = [ "ring" "runtime-tokio" "rustls" ];
+      };
+      "quinn-proto" = rec {
+        crateName = "quinn-proto";
+        version = "0.11.3";
+        edition = "2021";
+        sha256 = "1zhfcj6fffdgkqdhxzlr18hqmpwqshwbx9280h8bi78h7b01gxfx";
+        libName = "quinn_proto";
+        dependencies = [
+          {
+            name = "bytes";
+            packageId = "bytes";
+          }
+          {
+            name = "rand";
+            packageId = "rand";
+          }
+          {
+            name = "ring";
+            packageId = "ring";
+            optional = true;
+          }
+          {
+            name = "rustc-hash";
+            packageId = "rustc-hash 1.1.0";
+          }
+          {
+            name = "rustls";
+            packageId = "rustls";
+            optional = true;
+            usesDefaultFeatures = false;
+            features = [ "ring" "std" ];
+          }
+          {
+            name = "slab";
+            packageId = "slab";
+          }
+          {
+            name = "thiserror";
+            packageId = "thiserror";
+          }
+          {
+            name = "tinyvec";
+            packageId = "tinyvec";
+            features = [ "alloc" ];
+          }
+          {
+            name = "tracing";
+            packageId = "tracing";
+          }
+        ];
+        features = {
+          "arbitrary" = [ "dep:arbitrary" ];
+          "default" = [ "rustls" "log" ];
+          "log" = [ "tracing/log" ];
+          "platform-verifier" = [ "dep:rustls-platform-verifier" ];
+          "ring" = [ "dep:ring" ];
+          "rustls" = [ "dep:rustls" "ring" ];
+        };
+        resolvedDefaultFeatures = [ "ring" "rustls" ];
+      };
+      "quinn-udp" = rec {
+        crateName = "quinn-udp";
+        version = "0.5.2";
+        edition = "2021";
+        sha256 = "0ilcrwrah36nqxh329p31ghj3rxm4sw5w4iy2kxwf3w68nf655lh";
+        libName = "quinn_udp";
+        dependencies = [
+          {
+            name = "libc";
+            packageId = "libc";
+          }
+          {
+            name = "once_cell";
+            packageId = "once_cell";
+            target = { target, features }: (target."windows" or false);
+          }
+          {
+            name = "socket2";
+            packageId = "socket2";
+          }
+          {
+            name = "tracing";
+            packageId = "tracing";
+          }
+          {
+            name = "windows-sys";
+            packageId = "windows-sys 0.52.0";
+            target = { target, features }: (target."windows" or false);
+            features = [ "Win32_Foundation" "Win32_System_IO" "Win32_Networking_WinSock" ];
+          }
+        ];
+        features = {
+          "default" = [ "log" ];
+          "log" = [ "tracing/log" ];
+        };
+      };
       "quote" = rec {
         crateName = "quote";
-        version = "1.0.35";
+        version = "1.0.37";
         edition = "2018";
-        sha256 = "1vv8r2ncaz4pqdr78x7f138ka595sp2ncr1sa2plm4zxbsmwj7i9";
+        sha256 = "1brklraw2g34bxy9y4q1nbrccn7bv36ylihv12c9vlcii55x7fdm";
         authors = [
           "David Tolnay <dtolnay@gmail.com>"
         ];
@@ -8316,25 +9572,6 @@ rec {
           "serde1" = [ "serde" ];
         };
       };
-      "rand_xoshiro" = rec {
-        crateName = "rand_xoshiro";
-        version = "0.6.0";
-        edition = "2018";
-        sha256 = "1ajsic84rzwz5qr0mzlay8vi17swqi684bqvwqyiim3flfrcv5vg";
-        authors = [
-          "The Rand Project Developers"
-        ];
-        dependencies = [
-          {
-            name = "rand_core";
-            packageId = "rand_core";
-          }
-        ];
-        features = {
-          "serde" = [ "dep:serde" ];
-          "serde1" = [ "serde" ];
-        };
-      };
       "rayon" = rec {
         crateName = "rayon";
         version = "1.8.1";
@@ -8365,6 +9602,7 @@ rec {
         edition = "2021";
         links = "rayon-core";
         sha256 = "1qpwim68ai5h0j7axa8ai8z0payaawv3id0lrgkqmapx7lx8fr8l";
+        libName = "rayon_core";
         authors = [
           "Niko Matsakis <niko@alum.mit.edu>"
           "Josh Stone <cuviper@gmail.com>"
@@ -8383,6 +9621,34 @@ rec {
           "web_spin_lock" = [ "dep:wasm_sync" ];
         };
       };
+      "redb" = rec {
+        crateName = "redb";
+        version = "2.1.2";
+        edition = "2021";
+        sha256 = "1r8z96z3rnv3m80k9fikflq0dnf5c02br57z0phqlam55v1kscjq";
+        type = [ "cdylib" "rlib" ];
+        authors = [
+          "Christopher Berner <me@cberner.com>"
+        ];
+        dependencies = [
+          {
+            name = "libc";
+            packageId = "libc";
+            target = { target, features }: (target."unix" or false);
+          }
+        ];
+        devDependencies = [
+          {
+            name = "libc";
+            packageId = "libc";
+            target = { target, features }: (!("wasi" == target."os" or null));
+          }
+        ];
+        features = {
+          "logging" = [ "dep:log" ];
+          "python" = [ "dep:pyo3" "dep:pyo3-build-config" ];
+        };
+      };
       "redox_syscall 0.2.16" = rec {
         crateName = "redox_syscall";
         version = "0.2.16";
@@ -8475,9 +9741,9 @@ rec {
       };
       "regex" = rec {
         crateName = "regex";
-        version = "1.10.2";
+        version = "1.10.6";
         edition = "2021";
-        sha256 = "0hxkd814n4irind8im5c9am221ri6bprx49nc7yxv02ykhd9a2rq";
+        sha256 = "06cnlxwzyqfbw1za1i7ks89ns4i2kr0lpg5ykx56b8v7dd6df6a2";
         authors = [
           "The Rust Project Developers"
           "Andrew Gallant <jamslam@gmail.com>"
@@ -8487,15 +9753,17 @@ rec {
             name = "aho-corasick";
             packageId = "aho-corasick";
             optional = true;
+            usesDefaultFeatures = false;
           }
           {
             name = "memchr";
             packageId = "memchr";
             optional = true;
+            usesDefaultFeatures = false;
           }
           {
             name = "regex-automata";
-            packageId = "regex-automata 0.4.3";
+            packageId = "regex-automata 0.4.7";
             usesDefaultFeatures = false;
             features = [ "alloc" "syntax" "meta" "nfa-pikevm" ];
           }
@@ -8534,6 +9802,7 @@ rec {
         version = "0.1.10";
         edition = "2015";
         sha256 = "0ci1hvbzhrfby5fdpf4ganhf7kla58acad9i1ff1p34dzdrhs8vc";
+        libName = "regex_automata";
         authors = [
           "Andrew Gallant <jamslam@gmail.com>"
         ];
@@ -8553,11 +9822,12 @@ rec {
         };
         resolvedDefaultFeatures = [ "default" "regex-syntax" "std" ];
       };
-      "regex-automata 0.4.3" = rec {
+      "regex-automata 0.4.7" = rec {
         crateName = "regex-automata";
-        version = "0.4.3";
+        version = "0.4.7";
         edition = "2021";
-        sha256 = "0gs8q9yhd3kcg4pr00ag4viqxnh5l7jpyb9fsfr8hzh451w4r02z";
+        sha256 = "1pwjdi4jckpbaivpl6x4v5g4crb37zr2wac93wlfsbzgqn6gbjiq";
+        libName = "regex_automata";
         authors = [
           "The Rust Project Developers"
           "Andrew Gallant <jamslam@gmail.com>"
@@ -8618,6 +9888,7 @@ rec {
         version = "0.6.29";
         edition = "2018";
         sha256 = "1qgj49vm6y3zn1hi09x91jvgkl2b1fiaq402skj83280ggfwcqpi";
+        libName = "regex_syntax";
         authors = [
           "The Rust Project Developers"
         ];
@@ -8632,6 +9903,7 @@ rec {
         version = "0.8.2";
         edition = "2021";
         sha256 = "17rd2s8xbiyf6lb4aj2nfi44zqlj98g2ays8zzj2vfs743k79360";
+        libName = "regex_syntax";
         authors = [
           "The Rust Project Developers"
           "Andrew Gallant <jamslam@gmail.com>"
@@ -8648,6 +9920,7 @@ rec {
         version = "1.9.2";
         edition = "2021";
         sha256 = "1g0gc604zwm73gvpcicn8si25j9j5agqz50r0x1bkmgx6f7mi678";
+        libName = "relative_path";
         authors = [
           "John-John Tedro <udoprog@tedro.se>"
         ];
@@ -8658,27 +9931,22 @@ rec {
       };
       "reqwest" = rec {
         crateName = "reqwest";
-        version = "0.11.23";
-        edition = "2018";
-        sha256 = "0hgvzb7r46656r9vqhl5qk1kbr2xzjb91yr2cb321160ka6sxc9p";
+        version = "0.12.7";
+        edition = "2021";
+        sha256 = "0qsymmmgam6whjcymnlpf5kvk3ylc4bs92lygz63hp7g95b9bx7q";
         authors = [
           "Sean McArthur <sean@seanmonstar.com>"
         ];
         dependencies = [
           {
             name = "base64";
-            packageId = "base64";
+            packageId = "base64 0.22.1";
           }
           {
             name = "bytes";
             packageId = "bytes";
           }
           {
-            name = "encoding_rs";
-            packageId = "encoding_rs";
-            target = { target, features }: (!("wasm32" == target."arch" or null));
-          }
-          {
             name = "futures-core";
             packageId = "futures-core";
             usesDefaultFeatures = false;
@@ -8691,6 +9959,7 @@ rec {
           {
             name = "h2";
             packageId = "h2";
+            optional = true;
             target = { target, features }: (!("wasm32" == target."arch" or null));
           }
           {
@@ -8703,11 +9972,15 @@ rec {
             target = { target, features }: (!("wasm32" == target."arch" or null));
           }
           {
+            name = "http-body-util";
+            packageId = "http-body-util";
+            target = { target, features }: (!("wasm32" == target."arch" or null));
+          }
+          {
             name = "hyper";
             packageId = "hyper";
-            usesDefaultFeatures = false;
             target = { target, features }: (!("wasm32" == target."arch" or null));
-            features = [ "tcp" "http1" "http2" "client" "runtime" ];
+            features = [ "http1" "client" ];
           }
           {
             name = "hyper-rustls";
@@ -8715,6 +9988,13 @@ rec {
             optional = true;
             usesDefaultFeatures = false;
             target = { target, features }: (!("wasm32" == target."arch" or null));
+            features = [ "http1" "tls12" ];
+          }
+          {
+            name = "hyper-util";
+            packageId = "hyper-util";
+            target = { target, features }: (!("wasm32" == target."arch" or null));
+            features = [ "http1" "client" "client-legacy" "tokio" ];
           }
           {
             name = "ipnet";
@@ -8752,25 +10032,41 @@ rec {
             target = { target, features }: (!("wasm32" == target."arch" or null));
           }
           {
+            name = "quinn";
+            packageId = "quinn";
+            optional = true;
+            usesDefaultFeatures = false;
+            target = { target, features }: (!("wasm32" == target."arch" or null));
+            features = [ "rustls" "runtime-tokio" ];
+          }
+          {
             name = "rustls";
-            packageId = "rustls 0.21.12";
+            packageId = "rustls";
             optional = true;
+            usesDefaultFeatures = false;
             target = { target, features }: (!("wasm32" == target."arch" or null));
-            features = [ "dangerous_configuration" ];
+            features = [ "std" "tls12" ];
           }
           {
             name = "rustls-native-certs";
-            packageId = "rustls-native-certs 0.6.3";
+            packageId = "rustls-native-certs";
             optional = true;
             target = { target, features }: (!("wasm32" == target."arch" or null));
           }
           {
             name = "rustls-pemfile";
-            packageId = "rustls-pemfile 1.0.4";
+            packageId = "rustls-pemfile";
             optional = true;
             target = { target, features }: (!("wasm32" == target."arch" or null));
           }
           {
+            name = "rustls-pki-types";
+            packageId = "rustls-pki-types";
+            optional = true;
+            target = { target, features }: (!("wasm32" == target."arch" or null));
+            features = [ "alloc" ];
+          }
+          {
             name = "serde";
             packageId = "serde";
           }
@@ -8789,9 +10085,9 @@ rec {
             packageId = "serde_urlencoded";
           }
           {
-            name = "system-configuration";
-            packageId = "system-configuration";
-            target = { target, features }: ("macos" == target."os" or null);
+            name = "sync_wrapper";
+            packageId = "sync_wrapper 1.0.1";
+            features = [ "futures" ];
           }
           {
             name = "tokio";
@@ -8802,9 +10098,11 @@ rec {
           }
           {
             name = "tokio-rustls";
-            packageId = "tokio-rustls 0.24.1";
+            packageId = "tokio-rustls";
             optional = true;
+            usesDefaultFeatures = false;
             target = { target, features }: (!("wasm32" == target."arch" or null));
+            features = [ "tls12" ];
           }
           {
             name = "tokio-util";
@@ -8845,18 +10143,38 @@ rec {
             features = [ "AbortController" "AbortSignal" "Headers" "Request" "RequestInit" "RequestMode" "Response" "Window" "FormData" "Blob" "BlobPropertyBag" "ServiceWorkerGlobalScope" "RequestCredentials" "File" "ReadableStream" ];
           }
           {
-            name = "winreg";
-            packageId = "winreg";
+            name = "windows-registry";
+            packageId = "windows-registry";
             target = { target, features }: (target."windows" or false);
           }
         ];
         devDependencies = [
           {
+            name = "futures-util";
+            packageId = "futures-util";
+            usesDefaultFeatures = false;
+            target = { target, features }: (!("wasm32" == target."arch" or null));
+            features = [ "std" "alloc" ];
+          }
+          {
             name = "hyper";
             packageId = "hyper";
             usesDefaultFeatures = false;
             target = { target, features }: (!("wasm32" == target."arch" or null));
-            features = [ "tcp" "stream" "http1" "http2" "client" "server" "runtime" ];
+            features = [ "http1" "http2" "client" "server" ];
+          }
+          {
+            name = "hyper-util";
+            packageId = "hyper-util";
+            target = { target, features }: (!("wasm32" == target."arch" or null));
+            features = [ "http1" "http2" "client" "client-legacy" "server-auto" "tokio" ];
+          }
+          {
+            name = "rustls";
+            packageId = "rustls";
+            usesDefaultFeatures = false;
+            target = { target, features }: (!("wasm32" == target."arch" or null));
+            features = [ "ring" ];
           }
           {
             name = "serde";
@@ -8879,51 +10197,177 @@ rec {
           }
         ];
         features = {
-          "__rustls" = [ "hyper-rustls" "tokio-rustls" "rustls" "__tls" "rustls-pemfile" ];
-          "async-compression" = [ "dep:async-compression" ];
-          "blocking" = [ "futures-util/io" "tokio/rt-multi-thread" "tokio/sync" ];
-          "brotli" = [ "async-compression" "async-compression/brotli" "tokio-util" ];
-          "cookie_crate" = [ "dep:cookie_crate" ];
-          "cookie_store" = [ "dep:cookie_store" ];
-          "cookies" = [ "cookie_crate" "cookie_store" ];
-          "default" = [ "default-tls" ];
-          "default-tls" = [ "hyper-tls" "native-tls-crate" "__tls" "tokio-native-tls" ];
-          "deflate" = [ "async-compression" "async-compression/zlib" "tokio-util" ];
-          "futures-channel" = [ "dep:futures-channel" ];
-          "gzip" = [ "async-compression" "async-compression/gzip" "tokio-util" ];
-          "h3" = [ "dep:h3" ];
-          "h3-quinn" = [ "dep:h3-quinn" ];
-          "http3" = [ "rustls-tls-manual-roots" "h3" "h3-quinn" "quinn" "futures-channel" ];
-          "hyper-rustls" = [ "dep:hyper-rustls" ];
-          "hyper-tls" = [ "dep:hyper-tls" ];
-          "json" = [ "serde_json" ];
-          "mime_guess" = [ "dep:mime_guess" ];
-          "multipart" = [ "mime_guess" ];
+          "__rustls" = [ "dep:hyper-rustls" "dep:tokio-rustls" "dep:rustls" "__tls" "dep:rustls-pemfile" "dep:rustls-pki-types" ];
+          "__rustls-ring" = [ "hyper-rustls?/ring" "tokio-rustls?/ring" "rustls?/ring" "quinn?/ring" ];
+          "__tls" = [ "dep:rustls-pemfile" "tokio/io-util" ];
+          "blocking" = [ "dep:futures-channel" "futures-channel?/sink" "futures-util/io" "futures-util/sink" "tokio/sync" ];
+          "brotli" = [ "dep:async-compression" "async-compression?/brotli" "dep:tokio-util" ];
+          "charset" = [ "dep:encoding_rs" ];
+          "cookies" = [ "dep:cookie_crate" "dep:cookie_store" ];
+          "default" = [ "default-tls" "charset" "http2" "macos-system-configuration" ];
+          "default-tls" = [ "dep:hyper-tls" "dep:native-tls-crate" "__tls" "dep:tokio-native-tls" ];
+          "deflate" = [ "dep:async-compression" "async-compression?/zlib" "dep:tokio-util" ];
+          "gzip" = [ "dep:async-compression" "async-compression?/gzip" "dep:tokio-util" ];
+          "h2" = [ "dep:h2" ];
+          "hickory-dns" = [ "dep:hickory-resolver" ];
+          "http2" = [ "h2" "hyper/http2" "hyper-util/http2" "hyper-rustls?/http2" ];
+          "http3" = [ "rustls-tls-manual-roots" "dep:h3" "dep:h3-quinn" "dep:quinn" "dep:slab" "dep:futures-channel" ];
+          "json" = [ "dep:serde_json" ];
+          "macos-system-configuration" = [ "dep:system-configuration" ];
+          "multipart" = [ "dep:mime_guess" ];
           "native-tls" = [ "default-tls" ];
-          "native-tls-alpn" = [ "native-tls" "native-tls-crate/alpn" ];
-          "native-tls-crate" = [ "dep:native-tls-crate" ];
-          "native-tls-vendored" = [ "native-tls" "native-tls-crate/vendored" ];
-          "quinn" = [ "dep:quinn" ];
-          "rustls" = [ "dep:rustls" ];
-          "rustls-native-certs" = [ "dep:rustls-native-certs" ];
-          "rustls-pemfile" = [ "dep:rustls-pemfile" ];
+          "native-tls-alpn" = [ "native-tls" "native-tls-crate?/alpn" "hyper-tls?/alpn" ];
+          "native-tls-vendored" = [ "native-tls" "native-tls-crate?/vendored" ];
           "rustls-tls" = [ "rustls-tls-webpki-roots" ];
-          "rustls-tls-manual-roots" = [ "__rustls" ];
-          "rustls-tls-native-roots" = [ "rustls-native-certs" "__rustls" ];
-          "rustls-tls-webpki-roots" = [ "webpki-roots" "__rustls" ];
-          "serde_json" = [ "dep:serde_json" ];
-          "socks" = [ "tokio-socks" ];
-          "stream" = [ "tokio/fs" "tokio-util" "wasm-streams" ];
-          "tokio-native-tls" = [ "dep:tokio-native-tls" ];
-          "tokio-rustls" = [ "dep:tokio-rustls" ];
-          "tokio-socks" = [ "dep:tokio-socks" ];
-          "tokio-util" = [ "dep:tokio-util" ];
-          "trust-dns" = [ "trust-dns-resolver" ];
-          "trust-dns-resolver" = [ "dep:trust-dns-resolver" ];
-          "wasm-streams" = [ "dep:wasm-streams" ];
-          "webpki-roots" = [ "dep:webpki-roots" ];
+          "rustls-tls-manual-roots" = [ "__rustls" "__rustls-ring" ];
+          "rustls-tls-manual-roots-no-provider" = [ "__rustls" ];
+          "rustls-tls-native-roots" = [ "dep:rustls-native-certs" "hyper-rustls?/native-tokio" "__rustls" "__rustls-ring" ];
+          "rustls-tls-no-provider" = [ "rustls-tls-manual-roots-no-provider" ];
+          "rustls-tls-webpki-roots" = [ "dep:webpki-roots" "hyper-rustls?/webpki-tokio" "__rustls" "__rustls-ring" ];
+          "socks" = [ "dep:tokio-socks" ];
+          "stream" = [ "tokio/fs" "dep:tokio-util" "dep:wasm-streams" ];
+          "zstd" = [ "dep:async-compression" "async-compression?/zstd" "dep:tokio-util" ];
+        };
+        resolvedDefaultFeatures = [ "__rustls" "__rustls-ring" "__tls" "h2" "http2" "json" "rustls-tls-native-roots" "stream" ];
+      };
+      "reqwest-middleware" = rec {
+        crateName = "reqwest-middleware";
+        version = "0.3.3";
+        edition = "2018";
+        sha256 = "011b8n9a1bwalyk2y6x5s0wz52pxk70l4bbrba47qgsdc1dfnb2n";
+        libName = "reqwest_middleware";
+        authors = [
+          "Rodrigo Gryzinski <rodrigo.gryzinski@truelayer.com>"
+        ];
+        dependencies = [
+          {
+            name = "anyhow";
+            packageId = "anyhow";
+          }
+          {
+            name = "async-trait";
+            packageId = "async-trait";
+          }
+          {
+            name = "http";
+            packageId = "http";
+          }
+          {
+            name = "reqwest";
+            packageId = "reqwest";
+            usesDefaultFeatures = false;
+          }
+          {
+            name = "serde";
+            packageId = "serde";
+          }
+          {
+            name = "thiserror";
+            packageId = "thiserror";
+          }
+          {
+            name = "tower-service";
+            packageId = "tower-service";
+          }
+        ];
+        devDependencies = [
+          {
+            name = "reqwest";
+            packageId = "reqwest";
+            features = [ "rustls-tls" ];
+          }
+        ];
+        features = {
+          "charset" = [ "reqwest/charset" ];
+          "http2" = [ "reqwest/http2" ];
+          "json" = [ "reqwest/json" ];
+          "multipart" = [ "reqwest/multipart" ];
+          "rustls-tls" = [ "reqwest/rustls-tls" ];
         };
-        resolvedDefaultFeatures = [ "__rustls" "__tls" "hyper-rustls" "json" "rustls" "rustls-native-certs" "rustls-pemfile" "rustls-tls-native-roots" "serde_json" "stream" "tokio-rustls" "tokio-util" "wasm-streams" ];
+      };
+      "reqwest-tracing" = rec {
+        crateName = "reqwest-tracing";
+        version = "0.5.3";
+        edition = "2018";
+        sha256 = "0igb5hp1mdr8jb5qwj26dphykvydxy1piawrvpc368n7ckx9ppdz";
+        libName = "reqwest_tracing";
+        authors = [
+          "Rodrigo Gryzinski <rodrigo.gryzinski@truelayer.com>"
+        ];
+        dependencies = [
+          {
+            name = "anyhow";
+            packageId = "anyhow";
+          }
+          {
+            name = "async-trait";
+            packageId = "async-trait";
+          }
+          {
+            name = "getrandom";
+            packageId = "getrandom";
+            target = { target, features }: ("wasm32" == target."arch" or null);
+            features = [ "js" ];
+          }
+          {
+            name = "http";
+            packageId = "http";
+          }
+          {
+            name = "matchit";
+            packageId = "matchit 0.8.4";
+          }
+          {
+            name = "opentelemetry";
+            packageId = "opentelemetry 0.22.0";
+            rename = "opentelemetry_0_22_pkg";
+            optional = true;
+          }
+          {
+            name = "reqwest";
+            packageId = "reqwest";
+            usesDefaultFeatures = false;
+          }
+          {
+            name = "reqwest-middleware";
+            packageId = "reqwest-middleware";
+          }
+          {
+            name = "tracing";
+            packageId = "tracing";
+          }
+          {
+            name = "tracing-opentelemetry";
+            packageId = "tracing-opentelemetry 0.23.0";
+            rename = "tracing-opentelemetry_0_23_pkg";
+            optional = true;
+          }
+        ];
+        devDependencies = [
+          {
+            name = "reqwest";
+            packageId = "reqwest";
+            features = [ "rustls-tls" ];
+          }
+        ];
+        features = {
+          "opentelemetry_0_20" = [ "opentelemetry_0_20_pkg" "tracing-opentelemetry_0_21_pkg" ];
+          "opentelemetry_0_20_pkg" = [ "dep:opentelemetry_0_20_pkg" ];
+          "opentelemetry_0_21" = [ "opentelemetry_0_21_pkg" "tracing-opentelemetry_0_22_pkg" ];
+          "opentelemetry_0_21_pkg" = [ "dep:opentelemetry_0_21_pkg" ];
+          "opentelemetry_0_22" = [ "opentelemetry_0_22_pkg" "tracing-opentelemetry_0_23_pkg" ];
+          "opentelemetry_0_22_pkg" = [ "dep:opentelemetry_0_22_pkg" ];
+          "opentelemetry_0_23" = [ "opentelemetry_0_23_pkg" "tracing-opentelemetry_0_24_pkg" ];
+          "opentelemetry_0_23_pkg" = [ "dep:opentelemetry_0_23_pkg" ];
+          "opentelemetry_0_24" = [ "opentelemetry_0_24_pkg" "tracing-opentelemetry_0_25_pkg" ];
+          "opentelemetry_0_24_pkg" = [ "dep:opentelemetry_0_24_pkg" ];
+          "tracing-opentelemetry_0_21_pkg" = [ "dep:tracing-opentelemetry_0_21_pkg" ];
+          "tracing-opentelemetry_0_22_pkg" = [ "dep:tracing-opentelemetry_0_22_pkg" ];
+          "tracing-opentelemetry_0_23_pkg" = [ "dep:tracing-opentelemetry_0_23_pkg" ];
+          "tracing-opentelemetry_0_24_pkg" = [ "dep:tracing-opentelemetry_0_24_pkg" ];
+          "tracing-opentelemetry_0_25_pkg" = [ "dep:tracing-opentelemetry_0_25_pkg" ];
+        };
+        resolvedDefaultFeatures = [ "opentelemetry_0_22" "opentelemetry_0_22_pkg" "tracing-opentelemetry_0_23_pkg" ];
       };
       "ring" = rec {
         crateName = "ring";
@@ -9026,7 +10470,7 @@ rec {
           }
           {
             name = "rustc-hash";
-            packageId = "rustc-hash";
+            packageId = "rustc-hash 1.1.0";
           }
           {
             name = "text-size";
@@ -9111,7 +10555,7 @@ rec {
           }
           {
             name = "syn";
-            packageId = "syn 2.0.48";
+            packageId = "syn 2.0.76";
             features = [ "full" "parsing" "extra-traits" "visit" "visit-mut" ];
           }
           {
@@ -9150,7 +10594,7 @@ rec {
           }
           {
             name = "syn";
-            packageId = "syn 2.0.48";
+            packageId = "syn 2.0.76";
             features = [ "full" "extra-traits" ];
           }
         ];
@@ -9167,6 +10611,7 @@ rec {
         version = "0.1.23";
         edition = "2015";
         sha256 = "0xnbk2bmyzshacjm2g1kd4zzv2y2az14bw3sjccq5qkpmsfvn9nn";
+        libName = "rustc_demangle";
         authors = [
           "Alex Crichton <alex@alexcrichton.com>"
         ];
@@ -9176,16 +10621,32 @@ rec {
           "rustc-dep-of-std" = [ "core" "compiler_builtins" ];
         };
       };
-      "rustc-hash" = rec {
+      "rustc-hash 1.1.0" = rec {
         crateName = "rustc-hash";
         version = "1.1.0";
         edition = "2015";
         sha256 = "1qkc5khrmv5pqi5l5ca9p5nl5hs742cagrndhbrlk3dhlrx3zm08";
+        libName = "rustc_hash";
+        authors = [
+          "The Rust Project Developers"
+        ];
+        features = {
+          "default" = [ "std" ];
+        };
+        resolvedDefaultFeatures = [ "default" "std" ];
+      };
+      "rustc-hash 2.0.0" = rec {
+        crateName = "rustc-hash";
+        version = "2.0.0";
+        edition = "2021";
+        sha256 = "0lni0lf846bzrf3jvci6jaf4142n1mdqxvcpczk5ch9pfgyk8c2q";
+        libName = "rustc_hash";
         authors = [
           "The Rust Project Developers"
         ];
         features = {
           "default" = [ "std" ];
+          "rand" = [ "dep:rand" "std" ];
         };
         resolvedDefaultFeatures = [ "default" "std" ];
       };
@@ -9208,9 +10669,9 @@ rec {
       };
       "rustix" = rec {
         crateName = "rustix";
-        version = "0.38.30";
+        version = "0.38.35";
         edition = "2021";
-        sha256 = "1jkb6bzrj2w9ffy35aw4q04mqk1yxqw35fz80x0c4cxgi9c988rj";
+        sha256 = "0vy38cpprg64i6kfwz0w5hj2lqgliyimnx6vmplninir499m0pd8";
         authors = [
           "Dan Gohman <dev@sunfishcode.online>"
           "Jakub Konka <kubkon@jakubkonka.com>"
@@ -9218,7 +10679,7 @@ rec {
         dependencies = [
           {
             name = "bitflags";
-            packageId = "bitflags 2.4.2";
+            packageId = "bitflags 2.6.0";
             usesDefaultFeatures = false;
           }
           {
@@ -9249,14 +10710,12 @@ rec {
             optional = true;
             usesDefaultFeatures = false;
             target = { target, features }: ((!(target."rustix_use_libc" or false)) && (!(target."miri" or false)) && ("linux" == target."os" or null) && ("little" == target."endian" or null) && (("arm" == target."arch" or null) || (("aarch64" == target."arch" or null) && ("64" == target."pointer_width" or null)) || ("riscv64" == target."arch" or null) || ((target."rustix_use_experimental_asm" or false) && ("powerpc64" == target."arch" or null)) || ((target."rustix_use_experimental_asm" or false) && ("mips" == target."arch" or null)) || ((target."rustix_use_experimental_asm" or false) && ("mips32r6" == target."arch" or null)) || ((target."rustix_use_experimental_asm" or false) && ("mips64" == target."arch" or null)) || ((target."rustix_use_experimental_asm" or false) && ("mips64r6" == target."arch" or null)) || ("x86" == target."arch" or null) || (("x86_64" == target."arch" or null) && ("64" == target."pointer_width" or null))));
-            features = [ "extra_traits" ];
           }
           {
             name = "libc";
             packageId = "libc";
             usesDefaultFeatures = false;
             target = { target, features }: ((!(target."windows" or false)) && ((target."rustix_use_libc" or false) || (target."miri" or false) || (!(("linux" == target."os" or null) && ("little" == target."endian" or null) && (("arm" == target."arch" or null) || (("aarch64" == target."arch" or null) && ("64" == target."pointer_width" or null)) || ("riscv64" == target."arch" or null) || ((target."rustix_use_experimental_asm" or false) && ("powerpc64" == target."arch" or null)) || ((target."rustix_use_experimental_asm" or false) && ("mips" == target."arch" or null)) || ((target."rustix_use_experimental_asm" or false) && ("mips32r6" == target."arch" or null)) || ((target."rustix_use_experimental_asm" or false) && ("mips64" == target."arch" or null)) || ((target."rustix_use_experimental_asm" or false) && ("mips64r6" == target."arch" or null)) || ("x86" == target."arch" or null) || (("x86_64" == target."arch" or null) && ("64" == target."pointer_width" or null)))))));
-            features = [ "extra_traits" ];
           }
           {
             name = "linux-raw-sys";
@@ -9293,10 +10752,13 @@ rec {
         ];
         features = {
           "all-apis" = [ "event" "fs" "io_uring" "mm" "mount" "net" "param" "pipe" "process" "procfs" "pty" "rand" "runtime" "shm" "stdio" "system" "termios" "thread" "time" ];
+          "compiler_builtins" = [ "dep:compiler_builtins" ];
+          "core" = [ "dep:core" ];
           "default" = [ "std" "use-libc-auxv" ];
           "io_uring" = [ "event" "fs" "net" "linux-raw-sys/io_uring" ];
           "itoa" = [ "dep:itoa" ];
           "libc" = [ "dep:libc" ];
+          "libc-extra-traits" = [ "libc?/extra_traits" ];
           "libc_errno" = [ "dep:libc_errno" ];
           "linux_latest" = [ "linux_4_11" ];
           "net" = [ "linux-raw-sys/net" "linux-raw-sys/netlink" "linux-raw-sys/if_ether" "linux-raw-sys/xdp" ];
@@ -9306,20 +10768,21 @@ rec {
           "procfs" = [ "once_cell" "itoa" "fs" ];
           "pty" = [ "itoa" "fs" ];
           "runtime" = [ "linux-raw-sys/prctl" ];
-          "rustc-dep-of-std" = [ "dep:core" "dep:alloc" "dep:compiler_builtins" "linux-raw-sys/rustc-dep-of-std" "bitflags/rustc-dep-of-std" "compiler_builtins?/rustc-dep-of-std" ];
+          "rustc-dep-of-std" = [ "core" "rustc-std-workspace-alloc" "compiler_builtins" "linux-raw-sys/rustc-dep-of-std" "bitflags/rustc-dep-of-std" "compiler_builtins?/rustc-dep-of-std" ];
+          "rustc-std-workspace-alloc" = [ "dep:rustc-std-workspace-alloc" ];
           "shm" = [ "fs" ];
-          "std" = [ "bitflags/std" "alloc" "libc?/std" "libc_errno?/std" ];
+          "std" = [ "bitflags/std" "alloc" "libc?/std" "libc_errno?/std" "libc-extra-traits" ];
           "system" = [ "linux-raw-sys/system" ];
           "thread" = [ "linux-raw-sys/prctl" ];
-          "use-libc" = [ "libc_errno" "libc" ];
+          "use-libc" = [ "libc_errno" "libc" "libc-extra-traits" ];
         };
-        resolvedDefaultFeatures = [ "alloc" "default" "event" "fs" "net" "pipe" "process" "std" "termios" "time" "use-libc-auxv" ];
+        resolvedDefaultFeatures = [ "alloc" "default" "event" "fs" "libc-extra-traits" "net" "pipe" "process" "std" "termios" "time" "use-libc-auxv" ];
       };
-      "rustls 0.21.12" = rec {
+      "rustls" = rec {
         crateName = "rustls";
-        version = "0.21.12";
+        version = "0.23.7";
         edition = "2021";
-        sha256 = "0gjdg2a9r81sdwkyw3n5yfbkrr6p9gyk3xr2kcsr3cs83x6s2miz";
+        sha256 = "0yv7bh16rwhn6fnlb3wnixb2y4in9gf3z3ysa8k3zbgh3nbdpfzb";
         dependencies = [
           {
             name = "log";
@@ -9327,45 +10790,10 @@ rec {
             optional = true;
           }
           {
-            name = "ring";
-            packageId = "ring";
-          }
-          {
-            name = "rustls-webpki";
-            packageId = "rustls-webpki 0.101.7";
-            rename = "webpki";
-            features = [ "alloc" "std" ];
-          }
-          {
-            name = "sct";
-            packageId = "sct";
-          }
-        ];
-        devDependencies = [
-          {
-            name = "log";
-            packageId = "log";
-          }
-        ];
-        features = {
-          "default" = [ "logging" "tls12" ];
-          "log" = [ "dep:log" ];
-          "logging" = [ "log" ];
-          "read_buf" = [ "rustversion" ];
-          "rustversion" = [ "dep:rustversion" ];
-        };
-        resolvedDefaultFeatures = [ "dangerous_configuration" "default" "log" "logging" "tls12" ];
-      };
-      "rustls 0.22.4" = rec {
-        crateName = "rustls";
-        version = "0.22.4";
-        edition = "2021";
-        sha256 = "0cl4q6w0x1cl5ldjsgbbiiqhkz6qg5vxl5dkn9wwsyxc44vzfkmz";
-        dependencies = [
-          {
-            name = "log";
-            packageId = "log";
-            optional = true;
+            name = "once_cell";
+            packageId = "once_cell";
+            usesDefaultFeatures = false;
+            features = [ "alloc" "race" ];
           }
           {
             name = "ring";
@@ -9376,14 +10804,14 @@ rec {
             name = "rustls-pki-types";
             packageId = "rustls-pki-types";
             rename = "pki-types";
-            features = [ "std" ];
+            features = [ "alloc" ];
           }
           {
             name = "rustls-webpki";
-            packageId = "rustls-webpki 0.102.2";
+            packageId = "rustls-webpki";
             rename = "webpki";
             usesDefaultFeatures = false;
-            features = [ "std" ];
+            features = [ "alloc" ];
           }
           {
             name = "subtle";
@@ -9402,49 +10830,26 @@ rec {
           }
         ];
         features = {
+          "aws-lc-rs" = [ "aws_lc_rs" ];
           "aws_lc_rs" = [ "dep:aws-lc-rs" "webpki/aws_lc_rs" ];
-          "default" = [ "logging" "ring" "tls12" ];
+          "default" = [ "aws_lc_rs" "logging" "std" "tls12" ];
+          "fips" = [ "aws_lc_rs" "aws-lc-rs?/fips" ];
+          "hashbrown" = [ "dep:hashbrown" ];
           "log" = [ "dep:log" ];
           "logging" = [ "log" ];
-          "read_buf" = [ "rustversion" ];
+          "read_buf" = [ "rustversion" "std" ];
           "ring" = [ "dep:ring" "webpki/ring" ];
           "rustversion" = [ "dep:rustversion" ];
+          "std" = [ "webpki/std" "pki-types/std" "once_cell/std" ];
         };
-        resolvedDefaultFeatures = [ "log" "logging" "ring" "tls12" ];
-      };
-      "rustls-native-certs 0.6.3" = rec {
-        crateName = "rustls-native-certs";
-        version = "0.6.3";
-        edition = "2021";
-        sha256 = "007zind70rd5rfsrkdcfm8vn09j8sg02phg9334kark6rdscxam9";
-        dependencies = [
-          {
-            name = "openssl-probe";
-            packageId = "openssl-probe";
-            target = { target, features }: ((target."unix" or false) && (!("macos" == target."os" or null)));
-          }
-          {
-            name = "rustls-pemfile";
-            packageId = "rustls-pemfile 1.0.4";
-          }
-          {
-            name = "schannel";
-            packageId = "schannel";
-            target = { target, features }: (target."windows" or false);
-          }
-          {
-            name = "security-framework";
-            packageId = "security-framework";
-            target = { target, features }: ("macos" == target."os" or null);
-          }
-        ];
-
+        resolvedDefaultFeatures = [ "log" "logging" "ring" "std" "tls12" ];
       };
-      "rustls-native-certs 0.7.0" = rec {
+      "rustls-native-certs" = rec {
         crateName = "rustls-native-certs";
         version = "0.7.0";
         edition = "2021";
         sha256 = "14ip15dcr6fmjzi12lla9cpln7mmkdid4a7wsp344v4kz9gbh7wg";
+        libName = "rustls_native_certs";
         dependencies = [
           {
             name = "openssl-probe";
@@ -9453,7 +10858,7 @@ rec {
           }
           {
             name = "rustls-pemfile";
-            packageId = "rustls-pemfile 2.1.0";
+            packageId = "rustls-pemfile";
           }
           {
             name = "rustls-pki-types";
@@ -9473,28 +10878,16 @@ rec {
         ];
 
       };
-      "rustls-pemfile 1.0.4" = rec {
-        crateName = "rustls-pemfile";
-        version = "1.0.4";
-        edition = "2018";
-        sha256 = "1324n5bcns0rnw6vywr5agff3rwfvzphi7rmbyzwnv6glkhclx0w";
-        dependencies = [
-          {
-            name = "base64";
-            packageId = "base64";
-          }
-        ];
-
-      };
-      "rustls-pemfile 2.1.0" = rec {
+      "rustls-pemfile" = rec {
         crateName = "rustls-pemfile";
         version = "2.1.0";
         edition = "2018";
         sha256 = "02y7qn9d93ri4hrm72yw4zqlbxch6ma045nyazmdrppw6jvkncrw";
+        libName = "rustls_pemfile";
         dependencies = [
           {
             name = "base64";
-            packageId = "base64";
+            packageId = "base64 0.21.7";
             usesDefaultFeatures = false;
             features = [ "alloc" ];
           }
@@ -9515,37 +10908,14 @@ rec {
         version = "1.3.1";
         edition = "2021";
         sha256 = "1a0g7453h07701vyxjj05gv903a0shi43mf7hl3cdd08hsr6gpjy";
+        libName = "rustls_pki_types";
         features = {
           "default" = [ "alloc" ];
           "std" = [ "alloc" ];
         };
         resolvedDefaultFeatures = [ "alloc" "default" "std" ];
       };
-      "rustls-webpki 0.101.7" = rec {
-        crateName = "rustls-webpki";
-        version = "0.101.7";
-        edition = "2021";
-        sha256 = "0rapfhpkqp75552i8r0y7f4vq7csb4k7gjjans0df73sxv8paqlb";
-        libName = "webpki";
-        dependencies = [
-          {
-            name = "ring";
-            packageId = "ring";
-            usesDefaultFeatures = false;
-          }
-          {
-            name = "untrusted";
-            packageId = "untrusted";
-          }
-        ];
-        features = {
-          "alloc" = [ "ring/alloc" ];
-          "default" = [ "std" ];
-          "std" = [ "alloc" ];
-        };
-        resolvedDefaultFeatures = [ "alloc" "default" "std" ];
-      };
-      "rustls-webpki 0.102.2" = rec {
+      "rustls-webpki" = rec {
         crateName = "rustls-webpki";
         version = "0.102.2";
         edition = "2021";
@@ -9590,40 +10960,6 @@ rec {
         ];
 
       };
-      "rusty-fork" = rec {
-        crateName = "rusty-fork";
-        version = "0.3.0";
-        edition = "2018";
-        sha256 = "0kxwq5c480gg6q0j3bg4zzyfh2kwmc3v2ba94jw8ncjc8mpcqgfb";
-        authors = [
-          "Jason Lingle"
-        ];
-        dependencies = [
-          {
-            name = "fnv";
-            packageId = "fnv";
-          }
-          {
-            name = "quick-error";
-            packageId = "quick-error";
-          }
-          {
-            name = "tempfile";
-            packageId = "tempfile";
-          }
-          {
-            name = "wait-timeout";
-            packageId = "wait-timeout";
-            optional = true;
-          }
-        ];
-        features = {
-          "default" = [ "timeout" ];
-          "timeout" = [ "wait-timeout" ];
-          "wait-timeout" = [ "dep:wait-timeout" ];
-        };
-        resolvedDefaultFeatures = [ "timeout" "wait-timeout" ];
-      };
       "rustyline" = rec {
         crateName = "rustyline";
         version = "10.1.1";
@@ -9735,6 +11071,7 @@ rec {
         version = "1.0.6";
         edition = "2018";
         sha256 = "00h5j1w87dmhnvbv9l8bic3y7xxsnjmssvifw2ayvgx9mb1ivz4k";
+        libName = "same_file";
         authors = [
           "Andrew Gallant <jamslam@gmail.com>"
         ];
@@ -9772,6 +11109,17 @@ rec {
         ];
 
       };
+      "scoped-tls" = rec {
+        crateName = "scoped-tls";
+        version = "1.0.1";
+        edition = "2015";
+        sha256 = "15524h04mafihcvfpgxd8f4bgc3k95aclz8grjkg9a0rxcvn9kz1";
+        libName = "scoped_tls";
+        authors = [
+          "Alex Crichton <alex@alexcrichton.com>"
+        ];
+
+      };
       "scopeguard" = rec {
         crateName = "scopeguard";
         version = "1.2.0";
@@ -9785,31 +11133,12 @@ rec {
         };
         resolvedDefaultFeatures = [ "default" "use_std" ];
       };
-      "sct" = rec {
-        crateName = "sct";
-        version = "0.7.1";
-        edition = "2021";
-        sha256 = "056lmi2xkzdg1dbai6ha3n57s18cbip4pnmpdhyljli3m99n216s";
-        authors = [
-          "Joseph Birr-Pixton <jpixton@gmail.com>"
-        ];
-        dependencies = [
-          {
-            name = "ring";
-            packageId = "ring";
-          }
-          {
-            name = "untrusted";
-            packageId = "untrusted";
-          }
-        ];
-
-      };
       "security-framework" = rec {
         crateName = "security-framework";
         version = "2.9.2";
         edition = "2021";
         sha256 = "1pplxk15s5yxvi2m1sz5xfmjibp96cscdcl432w9jzbk0frlzdh5";
+        libName = "security_framework";
         authors = [
           "Steven Fackler <sfackler@gmail.com>"
           "Kornel <kornel@geekhood.net>"
@@ -9856,6 +11185,7 @@ rec {
         version = "2.9.1";
         edition = "2021";
         sha256 = "0yhciwlsy9dh0ps1gw3197kvyqx1bvc4knrhiznhid6kax196cp9";
+        libName = "security_framework_sys";
         authors = [
           "Steven Fackler <sfackler@gmail.com>"
           "Kornel <kornel@geekhood.net>"
@@ -9897,9 +11227,9 @@ rec {
       };
       "serde" = rec {
         crateName = "serde";
-        version = "1.0.197";
+        version = "1.0.209";
         edition = "2018";
-        sha256 = "1qjcxqd3p4yh5cmmax9q4ics1zy34j5ij32cvjj5dc5rw5rwic9z";
+        sha256 = "029yqqbb3c8v3gc720fhxn49dhgvb88zbyprdg5621riwzzy1z4r";
         authors = [
           "Erick Tryzelaar <erick.tryzelaar@gmail.com>"
           "David Tolnay <dtolnay@gmail.com>"
@@ -9931,9 +11261,9 @@ rec {
       };
       "serde_derive" = rec {
         crateName = "serde_derive";
-        version = "1.0.197";
+        version = "1.0.209";
         edition = "2015";
-        sha256 = "02v1x0sdv8qy06lpr6by4ar1n3jz3hmab15cgimpzhgd895v7c3y";
+        sha256 = "0w114ksg1ymnmqdisd0g1j3g8jgz6pam45xg6yb47dfpkybip0x5";
         procMacro = true;
         authors = [
           "Erick Tryzelaar <erick.tryzelaar@gmail.com>"
@@ -9954,7 +11284,7 @@ rec {
           }
           {
             name = "syn";
-            packageId = "syn 2.0.48";
+            packageId = "syn 2.0.76";
             usesDefaultFeatures = false;
             features = [ "clone-impls" "derive" "parsing" "printing" "proc-macro" ];
           }
@@ -10000,7 +11330,27 @@ rec {
           "preserve_order" = [ "indexmap" "std" ];
           "std" = [ "serde/std" ];
         };
-        resolvedDefaultFeatures = [ "alloc" "default" "std" ];
+        resolvedDefaultFeatures = [ "alloc" "default" "raw_value" "std" ];
+      };
+      "serde_path_to_error" = rec {
+        crateName = "serde_path_to_error";
+        version = "0.1.16";
+        edition = "2021";
+        sha256 = "19hlz2359l37ifirskpcds7sxg0gzpqvfilibs7whdys0128i6dg";
+        authors = [
+          "David Tolnay <dtolnay@gmail.com>"
+        ];
+        dependencies = [
+          {
+            name = "itoa";
+            packageId = "itoa";
+          }
+          {
+            name = "serde";
+            packageId = "serde";
+          }
+        ];
+
       };
       "serde_qs" = rec {
         crateName = "serde_qs";
@@ -10042,9 +11392,9 @@ rec {
       };
       "serde_spanned" = rec {
         crateName = "serde_spanned";
-        version = "0.6.5";
+        version = "0.6.7";
         edition = "2021";
-        sha256 = "1hgh6s3jjwyzhfk3xwb6pnnr1misq9nflwq0f026jafi37s24dpb";
+        sha256 = "0v9h2nlg8r7n7dkbgj1aw59g35kl869l652wc6zi2f4zawqinnzb";
         dependencies = [
           {
             name = "serde";
@@ -10063,6 +11413,32 @@ rec {
         };
         resolvedDefaultFeatures = [ "serde" ];
       };
+      "serde_tagged" = rec {
+        crateName = "serde_tagged";
+        version = "0.3.0";
+        edition = "2015";
+        sha256 = "1scr98aw9d9hf9bf0gr5fcmhkwsz0fpy2wr2zi5r4cnfya6j9kbn";
+        authors = [
+          "qzed <qzed@users.noreply.github.com>"
+        ];
+        dependencies = [
+          {
+            name = "erased-serde";
+            packageId = "erased-serde";
+            optional = true;
+          }
+          {
+            name = "serde";
+            packageId = "serde";
+          }
+        ];
+        features = {
+          "default" = [ "erased" ];
+          "erased" = [ "erased-serde" ];
+          "erased-serde" = [ "dep:erased-serde" ];
+        };
+        resolvedDefaultFeatures = [ "default" "erased" "erased-serde" ];
+      };
       "serde_urlencoded" = rec {
         crateName = "serde_urlencoded";
         version = "0.7.1";
@@ -10093,9 +11469,9 @@ rec {
       };
       "serde_with" = rec {
         crateName = "serde_with";
-        version = "3.7.0";
+        version = "3.9.0";
         edition = "2021";
-        sha256 = "16jn72cij27fxjafcsma1z5p587xkk8wqhp2yv98zy5vc7iv107f";
+        sha256 = "0mxqyh2qzq5xi8pnv9647337pz107yjv3ck7x9b229s892lwzkk9";
         authors = [
           "Jonas Bushart"
           "Marcin Kaลบmierczak"
@@ -10103,7 +11479,7 @@ rec {
         dependencies = [
           {
             name = "base64";
-            packageId = "base64";
+            packageId = "base64 0.22.1";
             optional = true;
             usesDefaultFeatures = false;
           }
@@ -10200,9 +11576,9 @@ rec {
       };
       "serde_with_macros" = rec {
         crateName = "serde_with_macros";
-        version = "3.7.0";
+        version = "3.9.0";
         edition = "2021";
-        sha256 = "0mbnika5bw1mvgnl50rs7wfzj7dwxzgwqxnq6656694j38bdqqb5";
+        sha256 = "0l1kfkzj46can1rwfspmnan8shqr0prlmbaig9hp9wpl3scy9zm8";
         procMacro = true;
         authors = [
           "Jonas Bushart"
@@ -10222,7 +11598,7 @@ rec {
           }
           {
             name = "syn";
-            packageId = "syn 2.0.48";
+            packageId = "syn 2.0.76";
             features = [ "extra-traits" "full" "parsing" ];
           }
         ];
@@ -10312,6 +11688,7 @@ rec {
         version = "0.1.7";
         edition = "2018";
         sha256 = "1xipjr4nqsgw34k7a2cgj9zaasl2ds6jwn89886kww93d32a637l";
+        libName = "sharded_slab";
         authors = [
           "Eliza Weisman <eliza@buoyant.io>"
         ];
@@ -10325,11 +11702,30 @@ rec {
           "loom" = [ "dep:loom" ];
         };
       };
+      "shlex" = rec {
+        crateName = "shlex";
+        version = "1.3.0";
+        edition = "2015";
+        sha256 = "0r1y6bv26c1scpxvhg2cabimrmwgbp4p3wy6syj9n0c4s3q2znhg";
+        authors = [
+          "comex <comexk@gmail.com>"
+          "Fenhl <fenhl@fenhl.net>"
+          "Adrian Taylor <adetaylor@chromium.org>"
+          "Alex Touchet <alextouchet@outlook.com>"
+          "Daniel Parks <dp+git@oxidized.org>"
+          "Garrett Berg <googberg@gmail.com>"
+        ];
+        features = {
+          "default" = [ "std" ];
+        };
+        resolvedDefaultFeatures = [ "default" "std" ];
+      };
       "signal-hook-registry" = rec {
         crateName = "signal-hook-registry";
         version = "1.4.1";
         edition = "2015";
         sha256 = "18crkkw5k82bvcx088xlf5g4n3772m24qhzgfan80nda7d3rn8nq";
+        libName = "signal_hook_registry";
         authors = [
           "Michal 'vorner' Vaner <vorner@vorner.cz>"
           "Masaki Hara <ackie.h.gmai@gmail.com>"
@@ -10463,12 +11859,13 @@ rec {
           "drain_keep_rest" = [ "drain_filter" ];
           "serde" = [ "dep:serde" ];
         };
+        resolvedDefaultFeatures = [ "const_generics" "const_new" ];
       };
       "smol_str" = rec {
         crateName = "smol_str";
-        version = "0.2.1";
+        version = "0.2.2";
         edition = "2018";
-        sha256 = "0jca0hyrwnv428q5gxhn2s8jsvrrkyrb0fyla9x37056mmimb176";
+        sha256 = "1bfylqf2vnqaglw58930vpxm2rfzji5gjp15a2c0kh8aj6v8ylyx";
         authors = [
           "Aleksey Kladov <aleksey.kladov@gmail.com>"
         ];
@@ -10538,13 +11935,14 @@ rec {
         edition = "2018";
         sha256 = "1gzy9rzggs090zf7hfvgp4lm1glrmg9qzh796686jnq7bxk7j04r";
         procMacro = true;
+        libName = "snafu_derive";
         authors = [
           "Jake Goulding <jake.goulding@gmail.com>"
         ];
         dependencies = [
           {
             name = "heck";
-            packageId = "heck";
+            packageId = "heck 0.4.1";
           }
           {
             name = "proc-macro2";
@@ -10660,6 +12058,7 @@ rec {
         version = "1.0.6";
         edition = "2018";
         sha256 = "1l7q4nha7wpsr0970bfqm773vhmpwr9l6rr8r4gwgrh46wvdh24y";
+        libName = "str_buf";
         authors = [
           "Douman <douman@gmx.se>"
         ];
@@ -10667,7 +12066,7 @@ rec {
           "serde" = [ "dep:serde" ];
         };
       };
-      "strsim" = rec {
+      "strsim 0.10.0" = rec {
         crateName = "strsim";
         version = "0.10.0";
         edition = "2015";
@@ -10677,6 +12076,17 @@ rec {
         ];
 
       };
+      "strsim 0.11.1" = rec {
+        crateName = "strsim";
+        version = "0.11.1";
+        edition = "2015";
+        sha256 = "0kzvqlw8hxqb7y598w1s0hxlnmi84sg5vsipp3yg5na5d1rvba3x";
+        authors = [
+          "Danny Guo <danny@dannyguo.com>"
+          "maxbachmann <oss@maxbachmann.de>"
+        ];
+
+      };
       "structmeta" = rec {
         crateName = "structmeta";
         version = "0.1.6";
@@ -10718,6 +12128,7 @@ rec {
         edition = "2021";
         sha256 = "14vxik2m3dm7bwx016qfz062fwznkbq02fyq8vby545m0pj0nhi4";
         procMacro = true;
+        libName = "structmeta_derive";
         authors = [
           "frozenlib"
         ];
@@ -10791,11 +12202,11 @@ rec {
         };
         resolvedDefaultFeatures = [ "clone-impls" "default" "derive" "extra-traits" "full" "parsing" "printing" "proc-macro" "quote" "visit" "visit-mut" ];
       };
-      "syn 2.0.48" = rec {
+      "syn 2.0.76" = rec {
         crateName = "syn";
-        version = "2.0.48";
+        version = "2.0.76";
         edition = "2021";
-        sha256 = "0gqgfygmrxmp8q32lia9p294kdd501ybn6kn2h4gqza0irik2d8g";
+        sha256 = "09fmdkmqqkkfkg53qnldl10ppwqkqlw22ixhg4rgrkp02hd0i3jp";
         authors = [
           "David Tolnay <dtolnay@gmail.com>"
         ];
@@ -10818,14 +12229,13 @@ rec {
         ];
         features = {
           "default" = [ "derive" "parsing" "printing" "clone-impls" "proc-macro" ];
-          "printing" = [ "quote" ];
-          "proc-macro" = [ "proc-macro2/proc-macro" "quote/proc-macro" ];
-          "quote" = [ "dep:quote" ];
+          "printing" = [ "dep:quote" ];
+          "proc-macro" = [ "proc-macro2/proc-macro" "quote?/proc-macro" ];
           "test" = [ "syn-test-suite/all-features" ];
         };
-        resolvedDefaultFeatures = [ "clone-impls" "default" "derive" "extra-traits" "full" "parsing" "printing" "proc-macro" "quote" "visit" "visit-mut" ];
+        resolvedDefaultFeatures = [ "clone-impls" "default" "derive" "extra-traits" "full" "parsing" "printing" "proc-macro" "visit" "visit-mut" ];
       };
-      "sync_wrapper" = rec {
+      "sync_wrapper 0.1.2" = rec {
         crateName = "sync_wrapper";
         version = "0.1.2";
         edition = "2018";
@@ -10838,49 +12248,27 @@ rec {
           "futures-core" = [ "dep:futures-core" ];
         };
       };
-      "system-configuration" = rec {
-        crateName = "system-configuration";
-        version = "0.5.1";
-        edition = "2021";
-        sha256 = "1rz0r30xn7fiyqay2dvzfy56cvaa3km74hnbz2d72p97bkf3lfms";
-        authors = [
-          "Mullvad VPN"
-        ];
-        dependencies = [
-          {
-            name = "bitflags";
-            packageId = "bitflags 1.3.2";
-          }
-          {
-            name = "core-foundation";
-            packageId = "core-foundation";
-          }
-          {
-            name = "system-configuration-sys";
-            packageId = "system-configuration-sys";
-          }
-        ];
-
-      };
-      "system-configuration-sys" = rec {
-        crateName = "system-configuration-sys";
-        version = "0.5.0";
-        edition = "2021";
-        sha256 = "1jckxvdr37bay3i9v52izgy52dg690x5xfg3hd394sv2xf4b2px7";
+      "sync_wrapper 1.0.1" = rec {
+        crateName = "sync_wrapper";
+        version = "1.0.1";
+        edition = "2018";
+        sha256 = "150k6lwvr4nl237ngsz8fj5j78k712m4bggrfyjsidllraz5l1m7";
         authors = [
-          "Mullvad VPN"
+          "Actyx AG <developer@actyx.io>"
         ];
         dependencies = [
           {
-            name = "core-foundation-sys";
-            packageId = "core-foundation-sys";
-          }
-          {
-            name = "libc";
-            packageId = "libc";
+            name = "futures-core";
+            packageId = "futures-core";
+            optional = true;
+            usesDefaultFeatures = false;
           }
         ];
-
+        features = {
+          "futures" = [ "futures-core" ];
+          "futures-core" = [ "dep:futures-core" ];
+        };
+        resolvedDefaultFeatures = [ "futures" "futures-core" ];
       };
       "tabwriter" = rec {
         crateName = "tabwriter";
@@ -10901,9 +12289,9 @@ rec {
       };
       "tempfile" = rec {
         crateName = "tempfile";
-        version = "3.9.0";
-        edition = "2018";
-        sha256 = "1ypkl7rvv57n16q28psxpb61rnyhmfaif12ascdnsyljm90l3kh1";
+        version = "3.12.0";
+        edition = "2021";
+        sha256 = "0r3sm3323crr50ranvask8z4qb3x5zfqxs1mrzab1swlqz8cvjq4";
         authors = [
           "Steven Allen <steven@stebalien.com>"
           "The Rust Project Developers"
@@ -10920,9 +12308,10 @@ rec {
             packageId = "fastrand";
           }
           {
-            name = "redox_syscall";
-            packageId = "redox_syscall 0.4.1";
-            target = { target, features }: ("redox" == target."os" or null);
+            name = "once_cell";
+            packageId = "once_cell";
+            usesDefaultFeatures = false;
+            features = [ "std" ];
           }
           {
             name = "rustix";
@@ -10932,7 +12321,7 @@ rec {
           }
           {
             name = "windows-sys";
-            packageId = "windows-sys 0.52.0";
+            packageId = "windows-sys 0.59.0";
             target = { target, features }: (target."windows" or false);
             features = [ "Win32_Storage_FileSystem" "Win32_Foundation" ];
           }
@@ -10962,6 +12351,7 @@ rec {
         edition = "2021";
         sha256 = "105lxqs0vnqff5821sgns8q1scvrwfx1yw6iz7i7nr862j6l1mk2";
         procMacro = true;
+        libName = "test_strategy";
         authors = [
           "frozenlib"
         ];
@@ -10991,6 +12381,7 @@ rec {
         version = "1.1.1";
         edition = "2018";
         sha256 = "0cwjbkl7w3xc8mnkhg1nwij6p5y2qkcfldgss8ddnawvhf3s32pi";
+        libName = "text_size";
         authors = [
           "Aleksey Kladov <aleksey.kladov@gmail.com>"
           "Christopher Durham (CAD97) <cad97@cad97.com>"
@@ -11001,9 +12392,9 @@ rec {
       };
       "thiserror" = rec {
         crateName = "thiserror";
-        version = "1.0.56";
+        version = "1.0.63";
         edition = "2021";
-        sha256 = "1b9hnzngjan4d89zjs16i01bcpcnvdwklyh73lj16xk28p37hhym";
+        sha256 = "092p83mf4p1vkjb2j6h6z96dan4raq2simhirjv12slbndq26d60";
         authors = [
           "David Tolnay <dtolnay@gmail.com>"
         ];
@@ -11017,10 +12408,11 @@ rec {
       };
       "thiserror-impl" = rec {
         crateName = "thiserror-impl";
-        version = "1.0.56";
+        version = "1.0.63";
         edition = "2021";
-        sha256 = "0w9ldp8fa574ilz4dn7y7scpcq66vdjy59qal8qdpwsh7faal3zs";
+        sha256 = "0qd21l2jjrkvnpr5da3l3b58v4wmrkn6aa0h1z5dg6kb8rc8nmd4";
         procMacro = true;
+        libName = "thiserror_impl";
         authors = [
           "David Tolnay <dtolnay@gmail.com>"
         ];
@@ -11035,7 +12427,7 @@ rec {
           }
           {
             name = "syn";
-            packageId = "syn 2.0.48";
+            packageId = "syn 2.0.76";
           }
         ];
 
@@ -11060,11 +12452,29 @@ rec {
         ];
         features = { };
       };
+      "threadpool" = rec {
+        crateName = "threadpool";
+        version = "1.8.1";
+        edition = "2015";
+        sha256 = "1amgfyzvynbm8pacniivzq9r0fh3chhs7kijic81j76l6c5ycl6h";
+        authors = [
+          "The Rust Project Developers"
+          "Corey Farwell <coreyf@rwell.org>"
+          "Stefan Schindler <dns2utf8@estada.ch>"
+        ];
+        dependencies = [
+          {
+            name = "num_cpus";
+            packageId = "num_cpus";
+          }
+        ];
+
+      };
       "time" = rec {
         crateName = "time";
-        version = "0.3.34";
+        version = "0.3.36";
         edition = "2021";
-        sha256 = "0jc7wgprzqjhzd0nqkbmdlnjwyddnswmjw86ni2vq55v45jqn968";
+        sha256 = "11g8hdpahgrf1wwl2rpsg5nxq3aj7ri6xr672v4qcij6cgjqizax";
         authors = [
           "Jacob Pratt <open-source@jhpratt.dev>"
           "Time contributors"
@@ -11145,6 +12555,7 @@ rec {
         version = "0.1.2";
         edition = "2021";
         sha256 = "1wx3qizcihw6z151hywfzzyd1y5dl804ydyxci6qm07vbakpr4pg";
+        libName = "time_core";
         authors = [
           "Jacob Pratt <open-source@jhpratt.dev>"
           "Time contributors"
@@ -11153,10 +12564,11 @@ rec {
       };
       "time-macros" = rec {
         crateName = "time-macros";
-        version = "0.2.17";
+        version = "0.2.18";
         edition = "2021";
-        sha256 = "0x3pahhk2751c6kqqq9dk6lz0gydbnxr44q01wpjlrz687ps78vv";
+        sha256 = "1kqwxvfh2jkpg38fy673d6danh1bhcmmbsmffww3mphgail2l99z";
         procMacro = true;
+        libName = "time_macros";
         authors = [
           "Jacob Pratt <open-source@jhpratt.dev>"
           "Time contributors"
@@ -11233,9 +12645,9 @@ rec {
       };
       "tokio" = rec {
         crateName = "tokio";
-        version = "1.35.1";
+        version = "1.39.3";
         edition = "2021";
-        sha256 = "01613rkziqp812a288ga65aqygs254wgajdi57v8brivjkx4x6y8";
+        sha256 = "1xgzhj7bxqqpjaabjkgsx8hi0f600bzj4iyp9f0a9gr3k6dwkawv";
         authors = [
           "Tokio Contributors <team@tokio.rs>"
         ];
@@ -11258,16 +12670,11 @@ rec {
           }
           {
             name = "mio";
-            packageId = "mio";
+            packageId = "mio 1.0.2";
             optional = true;
             usesDefaultFeatures = false;
           }
           {
-            name = "num_cpus";
-            packageId = "num_cpus";
-            optional = true;
-          }
-          {
             name = "pin-project-lite";
             packageId = "pin-project-lite";
           }
@@ -11291,7 +12698,7 @@ rec {
           }
           {
             name = "windows-sys";
-            packageId = "windows-sys 0.48.0";
+            packageId = "windows-sys 0.52.0";
             optional = true;
             target = { target, features }: (target."windows" or false);
           }
@@ -11309,7 +12716,7 @@ rec {
           }
           {
             name = "windows-sys";
-            packageId = "windows-sys 0.48.0";
+            packageId = "windows-sys 0.52.0";
             target = { target, features }: (target."windows" or false);
             features = [ "Win32_Foundation" "Win32_Security_Authorization" ];
           }
@@ -11322,10 +12729,9 @@ rec {
           "macros" = [ "tokio-macros" ];
           "mio" = [ "dep:mio" ];
           "net" = [ "libc" "mio/os-poll" "mio/os-ext" "mio/net" "socket2" "windows-sys/Win32_Foundation" "windows-sys/Win32_Security" "windows-sys/Win32_Storage_FileSystem" "windows-sys/Win32_System_Pipes" "windows-sys/Win32_System_SystemServices" ];
-          "num_cpus" = [ "dep:num_cpus" ];
           "parking_lot" = [ "dep:parking_lot" ];
           "process" = [ "bytes" "libc" "mio/os-poll" "mio/os-ext" "mio/net" "signal-hook-registry" "windows-sys/Win32_Foundation" "windows-sys/Win32_System_Threading" "windows-sys/Win32_System_WindowsProgramming" ];
-          "rt-multi-thread" = [ "num_cpus" "rt" ];
+          "rt-multi-thread" = [ "rt" ];
           "signal" = [ "libc" "mio/os-poll" "mio/net" "mio/os-ext" "signal-hook-registry" "windows-sys/Win32_Foundation" "windows-sys/Win32_System_Console" ];
           "signal-hook-registry" = [ "dep:signal-hook-registry" ];
           "socket2" = [ "dep:socket2" ];
@@ -11334,42 +12740,28 @@ rec {
           "tracing" = [ "dep:tracing" ];
           "windows-sys" = [ "dep:windows-sys" ];
         };
-        resolvedDefaultFeatures = [ "bytes" "default" "fs" "io-std" "io-util" "libc" "macros" "mio" "net" "num_cpus" "rt" "rt-multi-thread" "signal" "signal-hook-registry" "socket2" "sync" "test-util" "time" "tokio-macros" "windows-sys" ];
+        resolvedDefaultFeatures = [ "bytes" "default" "fs" "io-std" "io-util" "libc" "macros" "mio" "net" "rt" "rt-multi-thread" "signal" "signal-hook-registry" "socket2" "sync" "test-util" "time" "tokio-macros" "windows-sys" ];
       };
-      "tokio-io-timeout" = rec {
-        crateName = "tokio-io-timeout";
-        version = "1.2.0";
-        edition = "2018";
-        sha256 = "1gx84f92q1491vj4pkn81j8pz1s3pgwnbrsdhfsa2556mli41drh";
-        authors = [
-          "Steven Fackler <sfackler@gmail.com>"
-        ];
+      "tokio-listener" = rec {
+        crateName = "tokio-listener";
+        version = "0.4.3";
+        edition = "2021";
+        sha256 = "0iigg0w7n4r3ggcz0lj0vb2smq93dlwrqr06r1di54ij2afl6jli";
+        libName = "tokio_listener";
         dependencies = [
           {
-            name = "pin-project-lite";
-            packageId = "pin-project-lite";
-          }
-          {
-            name = "tokio";
-            packageId = "tokio";
-            features = [ "time" ];
+            name = "axum";
+            packageId = "axum";
+            rename = "axum07";
+            optional = true;
           }
-        ];
-        devDependencies = [
           {
-            name = "tokio";
-            packageId = "tokio";
-            features = [ "full" ];
+            name = "clap";
+            packageId = "clap";
+            optional = true;
+            usesDefaultFeatures = false;
+            features = [ "derive" "std" ];
           }
-        ];
-
-      };
-      "tokio-listener" = rec {
-        crateName = "tokio-listener";
-        version = "0.4.2";
-        edition = "2021";
-        sha256 = "1cm6r5dmpq96s8gw9dgsinq5g8s466j48dg7dckwc4gc28g6cd21";
-        dependencies = [
           {
             name = "document-features";
             packageId = "document-features";
@@ -11384,6 +12776,19 @@ rec {
             optional = true;
           }
           {
+            name = "hyper";
+            packageId = "hyper";
+            rename = "hyper1";
+            optional = true;
+            features = [ "server" ];
+          }
+          {
+            name = "hyper-util";
+            packageId = "hyper-util";
+            optional = true;
+            features = [ "server" "server-auto" ];
+          }
+          {
             name = "nix";
             packageId = "nix 0.26.4";
             optional = true;
@@ -11415,7 +12820,18 @@ rec {
           {
             name = "tonic";
             packageId = "tonic";
-            rename = "tonic";
+            rename = "tonic_012";
+            optional = true;
+          }
+          {
+            name = "tower";
+            packageId = "tower";
+            optional = true;
+            features = [ "util" ];
+          }
+          {
+            name = "tower-service";
+            packageId = "tower-service";
             optional = true;
           }
           {
@@ -11425,6 +12841,11 @@ rec {
         ];
         devDependencies = [
           {
+            name = "clap";
+            packageId = "clap";
+            features = [ "help" ];
+          }
+          {
             name = "tokio";
             packageId = "tokio";
             features = [ "macros" "rt" "io-util" ];
@@ -11445,18 +12866,20 @@ rec {
           "socket_options" = [ "socket2" ];
           "tokio-util" = [ "dep:tokio-util" ];
           "tonic010" = [ "dep:tonic_010" ];
-          "tonic011" = [ "dep:tonic" ];
+          "tonic011" = [ "dep:tonic_011" ];
+          "tonic012" = [ "dep:tonic_012" ];
           "unix_path_tools" = [ "nix" ];
           "user_facing_default" = [ "inetd" "unix" "unix_path_tools" "sd_listen" "socket_options" ];
         };
-        resolvedDefaultFeatures = [ "default" "inetd" "nix" "sd_listen" "socket2" "socket_options" "tokio-util" "tonic011" "unix" "unix_path_tools" "user_facing_default" ];
+        resolvedDefaultFeatures = [ "axum07" "clap" "default" "inetd" "multi-listener" "nix" "sd_listen" "socket2" "socket_options" "tokio-util" "tonic012" "unix" "unix_path_tools" "user_facing_default" ];
       };
       "tokio-macros" = rec {
         crateName = "tokio-macros";
-        version = "2.2.0";
+        version = "2.4.0";
         edition = "2021";
-        sha256 = "0fwjy4vdx1h9pi4g2nml72wi0fr27b5m954p13ji9anyy8l1x2jv";
+        sha256 = "0lnpg14h1v3fh2jvnc8cz7cjf0m7z1xgkwfpcyy632g829imjgb9";
         procMacro = true;
+        libName = "tokio_macros";
         authors = [
           "Tokio Contributors <team@tokio.rs>"
         ];
@@ -11471,7 +12894,7 @@ rec {
           }
           {
             name = "syn";
-            packageId = "syn 2.0.48";
+            packageId = "syn 2.0.76";
             features = [ "full" ];
           }
         ];
@@ -11482,6 +12905,7 @@ rec {
         version = "0.3.0";
         edition = "2018";
         sha256 = "0kr1hnm5dmb9gfkby88yg2xj8g6x4i4gipva0c8ca3xyxhvfnmvz";
+        libName = "tokio_retry";
         authors = [
           "Sam Rijs <srijs@airpost.net>"
         ];
@@ -11509,48 +12933,18 @@ rec {
         ];
 
       };
-      "tokio-rustls 0.24.1" = rec {
+      "tokio-rustls" = rec {
         crateName = "tokio-rustls";
-        version = "0.24.1";
-        edition = "2018";
-        sha256 = "10bhibg57mqir7xjhb2xmf24xgfpx6fzpyw720a4ih8a737jg0y2";
-        dependencies = [
-          {
-            name = "rustls";
-            packageId = "rustls 0.21.12";
-            usesDefaultFeatures = false;
-          }
-          {
-            name = "tokio";
-            packageId = "tokio";
-          }
-        ];
-        devDependencies = [
-          {
-            name = "tokio";
-            packageId = "tokio";
-            features = [ "full" ];
-          }
-        ];
-        features = {
-          "dangerous_configuration" = [ "rustls/dangerous_configuration" ];
-          "default" = [ "logging" "tls12" ];
-          "logging" = [ "rustls/logging" ];
-          "secret_extraction" = [ "rustls/secret_extraction" ];
-          "tls12" = [ "rustls/tls12" ];
-        };
-        resolvedDefaultFeatures = [ "default" "logging" "tls12" ];
-      };
-      "tokio-rustls 0.25.0" = rec {
-        crateName = "tokio-rustls";
-        version = "0.25.0";
+        version = "0.26.0";
         edition = "2021";
-        sha256 = "03w6d5aqqf084rmcmrsyq5grhydl53blaiqcl0i2yfnv187hqpkp";
+        sha256 = "1m00czrmk8x7pdjnz10a3da3i1d0sdf9j9vfp5dnk5ss1q6w8yqc";
+        libName = "tokio_rustls";
         dependencies = [
           {
             name = "rustls";
-            packageId = "rustls 0.22.4";
+            packageId = "rustls";
             usesDefaultFeatures = false;
+            features = [ "std" ];
           }
           {
             name = "rustls-pki-types";
@@ -11570,18 +12964,22 @@ rec {
           }
         ];
         features = {
-          "default" = [ "logging" "tls12" "ring" ];
+          "aws-lc-rs" = [ "aws_lc_rs" ];
+          "aws_lc_rs" = [ "rustls/aws_lc_rs" ];
+          "default" = [ "logging" "tls12" "aws_lc_rs" ];
+          "fips" = [ "rustls/fips" ];
           "logging" = [ "rustls/logging" ];
           "ring" = [ "rustls/ring" ];
           "tls12" = [ "rustls/tls12" ];
         };
-        resolvedDefaultFeatures = [ "default" "logging" "ring" "tls12" ];
+        resolvedDefaultFeatures = [ "logging" "ring" "tls12" ];
       };
       "tokio-stream" = rec {
         crateName = "tokio-stream";
-        version = "0.1.14";
+        version = "0.1.15";
         edition = "2021";
-        sha256 = "0hi8hcwavh5sdi1ivc9qc4yvyr32f153c212dpd7sb366y6rhz1r";
+        sha256 = "1brpbsqyg8yfmfc4y0j9zxvc8xsxjc31d48kb0g6jvpc1fgchyi6";
+        libName = "tokio_stream";
         authors = [
           "Tokio Contributors <team@tokio.rs>"
         ];
@@ -11625,6 +13023,7 @@ rec {
         version = "0.3.1";
         edition = "2018";
         sha256 = "0xffvap4g7hlswk5daklk3jaqha6s6wxw72c24kmqgna23018mwx";
+        libName = "tokio_tar";
         authors = [
           "Alex Crichton <alex@alexcrichton.com>"
           "dignifiedquire <me@dignifiequire.com>"
@@ -11681,9 +13080,10 @@ rec {
       };
       "tokio-test" = rec {
         crateName = "tokio-test";
-        version = "0.4.3";
+        version = "0.4.4";
         edition = "2021";
-        sha256 = "06fplzcc2ymahfzykd2ickw2qn7g3lz47bll00865s1spnx3r6z8";
+        sha256 = "1xzri2m3dg8nzdyznm77nymvil9cyh1gfdfrbnska51iqfmvls14";
+        libName = "tokio_test";
         authors = [
           "Tokio Contributors <team@tokio.rs>"
         ];
@@ -11721,9 +13121,10 @@ rec {
       };
       "tokio-util" = rec {
         crateName = "tokio-util";
-        version = "0.7.10";
+        version = "0.7.11";
         edition = "2021";
-        sha256 = "058y6x4mf0fsqji9rfyb77qbfyc50y4pk2spqgj6xsyr693z66al";
+        sha256 = "1qcz30db6m8lxkl61b3nic4bim1symi636nhbb3rmi3i6xxv9xlw";
+        libName = "tokio_util";
         authors = [
           "Tokio Contributors <team@tokio.rs>"
         ];
@@ -11754,13 +13155,6 @@ rec {
             packageId = "tokio";
             features = [ "sync" ];
           }
-          {
-            name = "tracing";
-            packageId = "tracing";
-            optional = true;
-            usesDefaultFeatures = false;
-            features = [ "std" ];
-          }
         ];
         devDependencies = [
           {
@@ -11771,7 +13165,6 @@ rec {
         ];
         features = {
           "__docs_rs" = [ "futures-util" ];
-          "codec" = [ "tracing" ];
           "compat" = [ "futures-io" ];
           "full" = [ "codec" "compat" "io-util" "time" "net" "rt" ];
           "futures-io" = [ "dep:futures-io" ];
@@ -11784,9 +13177,9 @@ rec {
           "time" = [ "tokio/time" "slab" ];
           "tracing" = [ "dep:tracing" ];
         };
-        resolvedDefaultFeatures = [ "codec" "compat" "default" "futures-io" "io" "io-util" "net" "tracing" ];
+        resolvedDefaultFeatures = [ "codec" "compat" "default" "futures-io" "io" "io-util" "net" ];
       };
-      "toml" = rec {
+      "toml 0.6.0" = rec {
         crateName = "toml";
         version = "0.6.0";
         edition = "2021";
@@ -11806,12 +13199,12 @@ rec {
           }
           {
             name = "toml_datetime";
-            packageId = "toml_datetime";
+            packageId = "toml_datetime 0.5.1";
             features = [ "serde" ];
           }
           {
             name = "toml_edit";
-            packageId = "toml_edit";
+            packageId = "toml_edit 0.18.1";
             optional = true;
             features = [ "serde" ];
           }
@@ -11832,7 +13225,54 @@ rec {
         };
         resolvedDefaultFeatures = [ "default" "display" "parse" ];
       };
-      "toml_datetime" = rec {
+      "toml 0.8.19" = rec {
+        crateName = "toml";
+        version = "0.8.19";
+        edition = "2021";
+        sha256 = "0knjd3mkxyb87qcs2dark3qkpadidap3frqfj5nqvhpxwfc1zvd1";
+        authors = [
+          "Alex Crichton <alex@alexcrichton.com>"
+        ];
+        dependencies = [
+          {
+            name = "serde";
+            packageId = "serde";
+          }
+          {
+            name = "serde_spanned";
+            packageId = "serde_spanned";
+            features = [ "serde" ];
+          }
+          {
+            name = "toml_datetime";
+            packageId = "toml_datetime 0.6.8";
+            features = [ "serde" ];
+          }
+          {
+            name = "toml_edit";
+            packageId = "toml_edit 0.22.20";
+            optional = true;
+            usesDefaultFeatures = false;
+            features = [ "serde" ];
+          }
+        ];
+        devDependencies = [
+          {
+            name = "serde";
+            packageId = "serde";
+            features = [ "derive" ];
+          }
+        ];
+        features = {
+          "default" = [ "parse" "display" ];
+          "display" = [ "dep:toml_edit" "toml_edit?/display" ];
+          "indexmap" = [ "dep:indexmap" ];
+          "parse" = [ "dep:toml_edit" "toml_edit?/parse" ];
+          "preserve_order" = [ "indexmap" ];
+        };
+        resolvedDefaultFeatures = [ "default" "display" "parse" ];
+      };
+      "toml_datetime 0.5.1" = rec {
         crateName = "toml_datetime";
         version = "0.5.1";
         edition = "2021";
@@ -11852,7 +13292,27 @@ rec {
         };
         resolvedDefaultFeatures = [ "serde" ];
       };
-      "toml_edit" = rec {
+      "toml_datetime 0.6.8" = rec {
+        crateName = "toml_datetime";
+        version = "0.6.8";
+        edition = "2021";
+        sha256 = "0hgv7v9g35d7y9r2afic58jvlwnf73vgd1mz2k8gihlgrf73bmqd";
+        authors = [
+          "Alex Crichton <alex@alexcrichton.com>"
+        ];
+        dependencies = [
+          {
+            name = "serde";
+            packageId = "serde";
+            optional = true;
+          }
+        ];
+        features = {
+          "serde" = [ "dep:serde" ];
+        };
+        resolvedDefaultFeatures = [ "serde" ];
+      };
+      "toml_edit 0.18.1" = rec {
         crateName = "toml_edit";
         version = "0.18.1";
         edition = "2021";
@@ -11883,7 +13343,7 @@ rec {
           }
           {
             name = "toml_datetime";
-            packageId = "toml_datetime";
+            packageId = "toml_datetime 0.5.1";
           }
         ];
         features = {
@@ -11893,11 +13353,55 @@ rec {
         };
         resolvedDefaultFeatures = [ "default" "serde" ];
       };
+      "toml_edit 0.22.20" = rec {
+        crateName = "toml_edit";
+        version = "0.22.20";
+        edition = "2021";
+        sha256 = "07ffw4626k6abicjxb2idh12f1p5fn965zk660zhqsyj5b048g2q";
+        authors = [
+          "Andronik Ordian <write@reusable.software>"
+          "Ed Page <eopage@gmail.com>"
+        ];
+        dependencies = [
+          {
+            name = "indexmap";
+            packageId = "indexmap 2.1.0";
+            features = [ "std" ];
+          }
+          {
+            name = "serde";
+            packageId = "serde";
+            optional = true;
+          }
+          {
+            name = "serde_spanned";
+            packageId = "serde_spanned";
+            optional = true;
+            features = [ "serde" ];
+          }
+          {
+            name = "toml_datetime";
+            packageId = "toml_datetime 0.6.8";
+          }
+          {
+            name = "winnow";
+            packageId = "winnow";
+            optional = true;
+          }
+        ];
+        features = {
+          "default" = [ "parse" "display" ];
+          "parse" = [ "dep:winnow" ];
+          "perf" = [ "dep:kstring" ];
+          "serde" = [ "dep:serde" "toml_datetime/serde" "dep:serde_spanned" ];
+        };
+        resolvedDefaultFeatures = [ "display" "parse" "serde" ];
+      };
       "tonic" = rec {
         crateName = "tonic";
-        version = "0.11.0";
+        version = "0.12.2";
         edition = "2021";
-        sha256 = "04qsr527i256i3dk9dp1g2jr42q7yl91y5h06rvd9ycy9rxfpi3n";
+        sha256 = "1bc8m8r7ysgkb7mhs3b3mvivd43nwaix6qnqhfp5hb2bkscbmxn6";
         authors = [
           "Lucio Franco <luciofranco14@gmail.com>"
         ];
@@ -11920,7 +13424,7 @@ rec {
           }
           {
             name = "base64";
-            packageId = "base64";
+            packageId = "base64 0.22.1";
           }
           {
             name = "bytes";
@@ -11940,10 +13444,14 @@ rec {
             packageId = "http-body";
           }
           {
+            name = "http-body-util";
+            packageId = "http-body-util";
+          }
+          {
             name = "hyper";
             packageId = "hyper";
             optional = true;
-            features = [ "full" ];
+            features = [ "http1" "http2" ];
           }
           {
             name = "hyper-timeout";
@@ -11951,6 +13459,12 @@ rec {
             optional = true;
           }
           {
+            name = "hyper-util";
+            packageId = "hyper-util";
+            optional = true;
+            features = [ "tokio" ];
+          }
+          {
             name = "percent-encoding";
             packageId = "percent-encoding";
           }
@@ -11967,38 +13481,43 @@ rec {
           }
           {
             name = "rustls-native-certs";
-            packageId = "rustls-native-certs 0.7.0";
+            packageId = "rustls-native-certs";
             optional = true;
           }
           {
             name = "rustls-pemfile";
-            packageId = "rustls-pemfile 2.1.0";
+            packageId = "rustls-pemfile";
             optional = true;
           }
           {
-            name = "rustls-pki-types";
-            packageId = "rustls-pki-types";
+            name = "socket2";
+            packageId = "socket2";
             optional = true;
+            features = [ "all" ];
           }
           {
             name = "tokio";
             packageId = "tokio";
+            optional = true;
+            usesDefaultFeatures = false;
           }
           {
             name = "tokio-rustls";
-            packageId = "tokio-rustls 0.25.0";
+            packageId = "tokio-rustls";
             optional = true;
+            usesDefaultFeatures = false;
+            features = [ "logging" "tls12" "ring" ];
           }
           {
             name = "tokio-stream";
             packageId = "tokio-stream";
+            usesDefaultFeatures = false;
           }
           {
             name = "tower";
             packageId = "tower";
             optional = true;
             usesDefaultFeatures = false;
-            features = [ "balance" "buffer" "discover" "limit" "load" "make" "timeout" "util" ];
           }
           {
             name = "tower-layer";
@@ -12026,24 +13545,28 @@ rec {
           }
         ];
         features = {
+          "channel" = [ "dep:hyper" "hyper?/client" "dep:hyper-util" "hyper-util?/client-legacy" "dep:tower" "tower?/balance" "tower?/buffer" "tower?/discover" "tower?/limit" "dep:tokio" "tokio?/time" "dep:hyper-timeout" ];
           "codegen" = [ "dep:async-trait" ];
           "default" = [ "transport" "codegen" "prost" ];
           "gzip" = [ "dep:flate2" ];
           "prost" = [ "dep:prost" ];
-          "tls" = [ "dep:rustls-pki-types" "dep:rustls-pemfile" "transport" "dep:tokio-rustls" "tokio/rt" "tokio/macros" ];
-          "tls-roots" = [ "tls-roots-common" "dep:rustls-native-certs" ];
-          "tls-roots-common" = [ "tls" ];
-          "tls-webpki-roots" = [ "tls-roots-common" "dep:webpki-roots" ];
-          "transport" = [ "dep:async-stream" "dep:axum" "channel" "dep:h2" "dep:hyper" "tokio/net" "tokio/time" "dep:tower" "dep:hyper-timeout" ];
+          "router" = [ "dep:axum" "dep:tower" "tower?/util" ];
+          "server" = [ "router" "dep:async-stream" "dep:h2" "dep:hyper" "hyper?/server" "dep:hyper-util" "hyper-util?/service" "hyper-util?/server-auto" "dep:socket2" "dep:tokio" "tokio?/macros" "tokio?/net" "tokio?/time" "tokio-stream/net" "dep:tower" "tower?/util" "tower?/limit" ];
+          "tls" = [ "dep:rustls-pemfile" "dep:tokio-rustls" "dep:tokio" "tokio?/rt" "tokio?/macros" ];
+          "tls-native-roots" = [ "tls" "channel" "dep:rustls-native-certs" ];
+          "tls-roots" = [ "tls-native-roots" ];
+          "tls-webpki-roots" = [ "tls" "channel" "dep:webpki-roots" ];
+          "transport" = [ "server" "channel" ];
           "zstd" = [ "dep:zstd" ];
         };
-        resolvedDefaultFeatures = [ "channel" "codegen" "default" "prost" "tls" "tls-roots" "tls-roots-common" "transport" ];
+        resolvedDefaultFeatures = [ "channel" "codegen" "default" "prost" "router" "server" "tls" "tls-native-roots" "tls-roots" "transport" ];
       };
       "tonic-build" = rec {
         crateName = "tonic-build";
-        version = "0.11.0";
+        version = "0.12.2";
         edition = "2021";
-        sha256 = "1hm99ckaw0pzq8h22bdjy6gpbg06kpvs0f73nj60f456f3fzckmy";
+        sha256 = "0rwaxvsx4rld1ncmcih0bvmg8k8ah4r1ccyjwgbnn4shfa3yhkpy";
+        libName = "tonic_build";
         authors = [
           "Lucio Franco <luciofranco14@gmail.com>"
         ];
@@ -12067,7 +13590,7 @@ rec {
           }
           {
             name = "syn";
-            packageId = "syn 2.0.48";
+            packageId = "syn 2.0.76";
           }
         ];
         features = {
@@ -12078,11 +13601,61 @@ rec {
         };
         resolvedDefaultFeatures = [ "cleanup-markdown" "default" "prost" "prost-build" "transport" ];
       };
+      "tonic-health" = rec {
+        crateName = "tonic-health";
+        version = "0.12.2";
+        edition = "2021";
+        sha256 = "19xm66ban6w8gjybqhp4ra3zz9pnqfjis3j95dmy5fh6yzk382pc";
+        libName = "tonic_health";
+        authors = [
+          "James Nugent <james@jen20.com>"
+        ];
+        dependencies = [
+          {
+            name = "async-stream";
+            packageId = "async-stream";
+          }
+          {
+            name = "prost";
+            packageId = "prost";
+          }
+          {
+            name = "tokio";
+            packageId = "tokio";
+            features = [ "sync" ];
+          }
+          {
+            name = "tokio-stream";
+            packageId = "tokio-stream";
+          }
+          {
+            name = "tonic";
+            packageId = "tonic";
+            usesDefaultFeatures = false;
+            features = [ "codegen" "prost" ];
+          }
+        ];
+        devDependencies = [
+          {
+            name = "tokio";
+            packageId = "tokio";
+            features = [ "rt-multi-thread" "macros" ];
+          }
+          {
+            name = "tokio-stream";
+            packageId = "tokio-stream";
+          }
+        ];
+        features = {
+          "default" = [ "transport" ];
+        };
+      };
       "tonic-reflection" = rec {
         crateName = "tonic-reflection";
-        version = "0.11.0";
+        version = "0.12.2";
         edition = "2021";
-        sha256 = "19xn3kyg062d7y1cnw537v9cxkzzcrnfri0jb29fbyn0smxj532l";
+        sha256 = "1zwrm9zzahipsrmaqfp4vk0w31qymf740fsp0yczh16vxrsbhmkv";
+        libName = "tonic_reflection";
         authors = [
           "James Nugent <james@jen20.com>"
           "Samani G. Gikandi <samani@gojulas.com>"
@@ -12250,13 +13823,108 @@ rec {
           "tracing" = [ "dep:tracing" ];
           "util" = [ "__common" "futures-util" "pin-project" ];
         };
-        resolvedDefaultFeatures = [ "__common" "balance" "buffer" "default" "discover" "futures-core" "futures-util" "indexmap" "limit" "load" "log" "make" "pin-project" "pin-project-lite" "rand" "ready-cache" "slab" "timeout" "tokio" "tokio-util" "tracing" "util" ];
+        resolvedDefaultFeatures = [ "__common" "balance" "buffer" "default" "discover" "futures-core" "futures-util" "indexmap" "limit" "load" "log" "make" "pin-project" "pin-project-lite" "rand" "ready-cache" "slab" "tokio" "tokio-util" "tracing" "util" ];
+      };
+      "tower-http" = rec {
+        crateName = "tower-http";
+        version = "0.5.2";
+        edition = "2018";
+        sha256 = "1xakj3x0anp55gjqibiwvzma5iz0w9pcjsr7qk97sx4qm4sd970y";
+        libName = "tower_http";
+        authors = [
+          "Tower Maintainers <team@tower-rs.com>"
+        ];
+        dependencies = [
+          {
+            name = "bitflags";
+            packageId = "bitflags 2.6.0";
+          }
+          {
+            name = "bytes";
+            packageId = "bytes";
+          }
+          {
+            name = "http";
+            packageId = "http";
+          }
+          {
+            name = "http-body";
+            packageId = "http-body";
+          }
+          {
+            name = "http-body-util";
+            packageId = "http-body-util";
+          }
+          {
+            name = "pin-project-lite";
+            packageId = "pin-project-lite";
+          }
+          {
+            name = "tower-layer";
+            packageId = "tower-layer";
+          }
+          {
+            name = "tower-service";
+            packageId = "tower-service";
+          }
+          {
+            name = "tracing";
+            packageId = "tracing";
+            optional = true;
+            usesDefaultFeatures = false;
+          }
+        ];
+        devDependencies = [
+          {
+            name = "bytes";
+            packageId = "bytes";
+          }
+        ];
+        features = {
+          "async-compression" = [ "dep:async-compression" ];
+          "auth" = [ "base64" "validate-request" ];
+          "base64" = [ "dep:base64" ];
+          "catch-panic" = [ "tracing" "futures-util/std" ];
+          "compression-br" = [ "async-compression/brotli" "futures-core" "tokio-util" "tokio" ];
+          "compression-deflate" = [ "async-compression/zlib" "futures-core" "tokio-util" "tokio" ];
+          "compression-full" = [ "compression-br" "compression-deflate" "compression-gzip" "compression-zstd" ];
+          "compression-gzip" = [ "async-compression/gzip" "futures-core" "tokio-util" "tokio" ];
+          "compression-zstd" = [ "async-compression/zstd" "futures-core" "tokio-util" "tokio" ];
+          "decompression-br" = [ "async-compression/brotli" "futures-core" "tokio-util" "tokio" ];
+          "decompression-deflate" = [ "async-compression/zlib" "futures-core" "tokio-util" "tokio" ];
+          "decompression-full" = [ "decompression-br" "decompression-deflate" "decompression-gzip" "decompression-zstd" ];
+          "decompression-gzip" = [ "async-compression/gzip" "futures-core" "tokio-util" "tokio" ];
+          "decompression-zstd" = [ "async-compression/zstd" "futures-core" "tokio-util" "tokio" ];
+          "follow-redirect" = [ "futures-util" "iri-string" "tower/util" ];
+          "fs" = [ "futures-util" "tokio/fs" "tokio-util/io" "tokio/io-util" "dep:http-range-header" "mime_guess" "mime" "percent-encoding" "httpdate" "set-status" "futures-util/alloc" "tracing" ];
+          "full" = [ "add-extension" "auth" "catch-panic" "compression-full" "cors" "decompression-full" "follow-redirect" "fs" "limit" "map-request-body" "map-response-body" "metrics" "normalize-path" "propagate-header" "redirect" "request-id" "sensitive-headers" "set-header" "set-status" "timeout" "trace" "util" "validate-request" ];
+          "futures-core" = [ "dep:futures-core" ];
+          "futures-util" = [ "dep:futures-util" ];
+          "httpdate" = [ "dep:httpdate" ];
+          "iri-string" = [ "dep:iri-string" ];
+          "metrics" = [ "tokio/time" ];
+          "mime" = [ "dep:mime" ];
+          "mime_guess" = [ "dep:mime_guess" ];
+          "percent-encoding" = [ "dep:percent-encoding" ];
+          "request-id" = [ "uuid" ];
+          "timeout" = [ "tokio/time" ];
+          "tokio" = [ "dep:tokio" ];
+          "tokio-util" = [ "dep:tokio-util" ];
+          "tower" = [ "dep:tower" ];
+          "trace" = [ "tracing" ];
+          "tracing" = [ "dep:tracing" ];
+          "util" = [ "tower" ];
+          "uuid" = [ "dep:uuid" ];
+          "validate-request" = [ "mime" ];
+        };
+        resolvedDefaultFeatures = [ "default" "trace" "tracing" ];
       };
       "tower-layer" = rec {
         crateName = "tower-layer";
         version = "0.3.2";
         edition = "2018";
         sha256 = "1l7i17k9vlssrdg4s3b0ia5jjkmmxsvv8s9y9ih0jfi8ssz8s362";
+        libName = "tower_layer";
         authors = [
           "Tower Maintainers <team@tower-rs.com>"
         ];
@@ -12267,6 +13935,7 @@ rec {
         version = "0.3.2";
         edition = "2018";
         sha256 = "0lmfzmmvid2yp2l36mbavhmqgsvzqf7r2wiwz73ml4xmwaf1rg5n";
+        libName = "tower_service";
         authors = [
           "Tower Maintainers <team@tower-rs.com>"
         ];
@@ -12317,7 +13986,7 @@ rec {
           "tracing-attributes" = [ "dep:tracing-attributes" ];
           "valuable" = [ "tracing-core/valuable" ];
         };
-        resolvedDefaultFeatures = [ "attributes" "default" "log" "max_level_trace" "release_max_level_info" "std" "tracing-attributes" ];
+        resolvedDefaultFeatures = [ "attributes" "default" "log" "max_level_trace" "release_max_level_debug" "std" "tracing-attributes" ];
       };
       "tracing-attributes" = rec {
         crateName = "tracing-attributes";
@@ -12325,6 +13994,7 @@ rec {
         edition = "2018";
         sha256 = "1rvb5dn9z6d0xdj14r403z0af0bbaqhg02hq4jc97g5wds6lqw1l";
         procMacro = true;
+        libName = "tracing_attributes";
         authors = [
           "Tokio Contributors <team@tokio.rs>"
           "Eliza Weisman <eliza@buoyant.io>"
@@ -12341,7 +14011,7 @@ rec {
           }
           {
             name = "syn";
-            packageId = "syn 2.0.48";
+            packageId = "syn 2.0.76";
             usesDefaultFeatures = false;
             features = [ "full" "parsing" "printing" "visit-mut" "clone-impls" "extra-traits" "proc-macro" ];
           }
@@ -12353,6 +14023,7 @@ rec {
         version = "0.1.32";
         edition = "2018";
         sha256 = "0m5aglin3cdwxpvbg6kz0r9r0k31j48n0kcfwsp6l49z26k3svf0";
+        libName = "tracing_core";
         authors = [
           "Tokio Contributors <team@tokio.rs>"
         ];
@@ -12383,6 +14054,7 @@ rec {
         version = "0.2.5";
         edition = "2018";
         sha256 = "1wimg0iwa2ldq7xv98lvivvf3q9ykfminig8r1bs0ig22np9bl4p";
+        libName = "tracing_futures";
         authors = [
           "Eliza Weisman <eliza@buoyant.io>"
           "Tokio Contributors <team@tokio.rs>"
@@ -12414,11 +14086,39 @@ rec {
         };
         resolvedDefaultFeatures = [ "default" "pin-project" "std" "std-future" ];
       };
+      "tracing-indicatif" = rec {
+        crateName = "tracing-indicatif";
+        version = "0.3.6";
+        edition = "2021";
+        sha256 = "07cmn4ilw8hdfzc1mirccwkgl160k3x9fhgg7xydj4gy9r181586";
+        libName = "tracing_indicatif";
+        dependencies = [
+          {
+            name = "indicatif";
+            packageId = "indicatif";
+            features = [ "in_memory" ];
+          }
+          {
+            name = "tracing";
+            packageId = "tracing";
+          }
+          {
+            name = "tracing-core";
+            packageId = "tracing-core";
+          }
+          {
+            name = "tracing-subscriber";
+            packageId = "tracing-subscriber";
+          }
+        ];
+
+      };
       "tracing-log" = rec {
         crateName = "tracing-log";
         version = "0.2.0";
         edition = "2018";
         sha256 = "1hs77z026k730ij1a9dhahzrl0s073gfa2hm5p0fbl0b80gmz1gf";
+        libName = "tracing_log";
         authors = [
           "Tokio Contributors <team@tokio.rs>"
         ];
@@ -12445,11 +14145,12 @@ rec {
         };
         resolvedDefaultFeatures = [ "log-tracer" "std" ];
       };
-      "tracing-opentelemetry" = rec {
+      "tracing-opentelemetry 0.23.0" = rec {
         crateName = "tracing-opentelemetry";
         version = "0.23.0";
         edition = "2018";
         sha256 = "1112kmckw0qwyckhbwarb230n4ldmfgzixr9jagbfjmy3fx19gm9";
+        libName = "tracing_opentelemetry";
         authors = [
           "Julian Tescher <julian@tescher.me>"
           "Tokio Contributors <team@tokio.rs>"
@@ -12466,13 +14167,13 @@ rec {
           }
           {
             name = "opentelemetry";
-            packageId = "opentelemetry";
+            packageId = "opentelemetry 0.22.0";
             usesDefaultFeatures = false;
             features = [ "trace" ];
           }
           {
             name = "opentelemetry_sdk";
-            packageId = "opentelemetry_sdk";
+            packageId = "opentelemetry_sdk 0.22.1";
             usesDefaultFeatures = false;
             features = [ "trace" ];
           }
@@ -12512,12 +14213,12 @@ rec {
         devDependencies = [
           {
             name = "opentelemetry";
-            packageId = "opentelemetry";
+            packageId = "opentelemetry 0.22.0";
             features = [ "trace" "metrics" ];
           }
           {
             name = "opentelemetry_sdk";
-            packageId = "opentelemetry_sdk";
+            packageId = "opentelemetry_sdk 0.22.1";
             usesDefaultFeatures = false;
             features = [ "trace" "rt-tokio" ];
           }
@@ -12545,11 +14246,111 @@ rec {
         };
         resolvedDefaultFeatures = [ "default" "metrics" "smallvec" "tracing-log" ];
       };
+      "tracing-opentelemetry 0.25.0" = rec {
+        crateName = "tracing-opentelemetry";
+        version = "0.25.0";
+        edition = "2021";
+        sha256 = "0fzzhpcxngnxra56cxmslr5y6k0f1b4ghqv9vz41p4kxvba4wy59";
+        libName = "tracing_opentelemetry";
+        dependencies = [
+          {
+            name = "js-sys";
+            packageId = "js-sys";
+            target = { target, features }: (("wasm32" == target."arch" or null) && (!("wasi" == target."os" or null)));
+          }
+          {
+            name = "once_cell";
+            packageId = "once_cell";
+          }
+          {
+            name = "opentelemetry";
+            packageId = "opentelemetry 0.24.0";
+            usesDefaultFeatures = false;
+            features = [ "trace" ];
+          }
+          {
+            name = "opentelemetry_sdk";
+            packageId = "opentelemetry_sdk 0.24.1";
+            usesDefaultFeatures = false;
+            features = [ "trace" ];
+          }
+          {
+            name = "smallvec";
+            packageId = "smallvec";
+            optional = true;
+          }
+          {
+            name = "tracing";
+            packageId = "tracing";
+            usesDefaultFeatures = false;
+            features = [ "std" ];
+          }
+          {
+            name = "tracing-core";
+            packageId = "tracing-core";
+          }
+          {
+            name = "tracing-log";
+            packageId = "tracing-log";
+            optional = true;
+            usesDefaultFeatures = false;
+          }
+          {
+            name = "tracing-subscriber";
+            packageId = "tracing-subscriber";
+            usesDefaultFeatures = false;
+            features = [ "registry" "std" ];
+          }
+          {
+            name = "web-time";
+            packageId = "web-time";
+            target = { target, features }: (("wasm32" == target."arch" or null) && (!("wasi" == target."os" or null)));
+          }
+        ];
+        devDependencies = [
+          {
+            name = "opentelemetry";
+            packageId = "opentelemetry 0.24.0";
+            features = [ "trace" "metrics" ];
+          }
+          {
+            name = "opentelemetry_sdk";
+            packageId = "opentelemetry_sdk 0.24.1";
+            usesDefaultFeatures = false;
+            features = [ "trace" "rt-tokio" ];
+          }
+          {
+            name = "tracing";
+            packageId = "tracing";
+            usesDefaultFeatures = false;
+            features = [ "std" "attributes" ];
+          }
+          {
+            name = "tracing-subscriber";
+            packageId = "tracing-subscriber";
+            usesDefaultFeatures = false;
+            features = [ "registry" "std" "fmt" ];
+          }
+        ];
+        features = {
+          "async-trait" = [ "dep:async-trait" ];
+          "default" = [ "tracing-log" "metrics" ];
+          "futures-util" = [ "dep:futures-util" ];
+          "lazy_static" = [ "dep:lazy_static" ];
+          "metrics" = [ "opentelemetry/metrics" "opentelemetry_sdk/metrics" "smallvec" ];
+          "metrics_gauge_unstable" = [ "opentelemetry/otel_unstable" ];
+          "smallvec" = [ "dep:smallvec" ];
+          "thiserror" = [ "dep:thiserror" ];
+          "tracing-log" = [ "dep:tracing-log" ];
+        };
+        resolvedDefaultFeatures = [ "default" "metrics" "smallvec" "tracing-log" ];
+      };
       "tracing-subscriber" = rec {
         crateName = "tracing-subscriber";
         version = "0.3.18";
         edition = "2018";
         sha256 = "12vs1bwk4kig1l2qqjbbn2nm5amwiqmkcmnznylzmnfvjy6083xd";
+        libName = "tracing_subscriber";
         authors = [
           "Eliza Weisman <eliza@buoyant.io>"
           "David Barsky <me@davidbarsky.com>"
@@ -12658,16 +14459,175 @@ rec {
         };
         resolvedDefaultFeatures = [ "alloc" "ansi" "default" "env-filter" "fmt" "matchers" "nu-ansi-term" "once_cell" "regex" "registry" "sharded-slab" "smallvec" "std" "thread_local" "tracing" "tracing-log" ];
       };
+      "tracing-tracy" = rec {
+        crateName = "tracing-tracy";
+        version = "0.11.2";
+        edition = "2021";
+        sha256 = "16xnvxshac0qv61zzv0rm4sl8x0hkws7cngx5z3fambgy4chbaf6";
+        libName = "tracing_tracy";
+        authors = [
+          "Simonas Kazlauskas <tracing-tracy@kazlauskas.me>"
+        ];
+        dependencies = [
+          {
+            name = "tracing-core";
+            packageId = "tracing-core";
+            usesDefaultFeatures = false;
+            features = [ "std" ];
+          }
+          {
+            name = "tracing-subscriber";
+            packageId = "tracing-subscriber";
+            usesDefaultFeatures = false;
+            features = [ "fmt" "registry" ];
+          }
+          {
+            name = "tracy-client";
+            packageId = "tracy-client";
+            rename = "client";
+            usesDefaultFeatures = false;
+          }
+        ];
+        features = {
+          "broadcast" = [ "client/broadcast" ];
+          "callstack-inlines" = [ "client/callstack-inlines" ];
+          "code-transfer" = [ "client/code-transfer" ];
+          "context-switch-tracing" = [ "client/context-switch-tracing" ];
+          "default" = [ "enable" "system-tracing" "context-switch-tracing" "sampling" "code-transfer" "broadcast" "callstack-inlines" ];
+          "delayed-init" = [ "client/delayed-init" ];
+          "demangle" = [ "client/demangle" ];
+          "enable" = [ "client/enable" ];
+          "fibers" = [ "client/fibers" ];
+          "flush-on-exit" = [ "client/flush-on-exit" ];
+          "manual-lifetime" = [ "client/manual-lifetime" ];
+          "ondemand" = [ "client/ondemand" ];
+          "only-ipv4" = [ "client/only-ipv4" ];
+          "only-localhost" = [ "client/only-localhost" ];
+          "sampling" = [ "client/sampling" ];
+          "system-tracing" = [ "client/system-tracing" ];
+          "timer-fallback" = [ "client/timer-fallback" ];
+          "verify" = [ "client/verify" ];
+        };
+        resolvedDefaultFeatures = [ "broadcast" "callstack-inlines" "code-transfer" "context-switch-tracing" "default" "enable" "flush-on-exit" "sampling" "system-tracing" ];
+      };
+      "tracy-client" = rec {
+        crateName = "tracy-client";
+        version = "0.17.0";
+        edition = "2021";
+        sha256 = "07bla4iigg17fl4zil2dwizslhw8mv8vrsfkhr7ri27zchd97ysr";
+        libName = "tracy_client";
+        authors = [
+          "Simonas Kazlauskas <tracy-client@kazlauskas.me>"
+        ];
+        dependencies = [
+          {
+            name = "loom";
+            packageId = "loom";
+            target = { target, features }: (target."loom" or false);
+          }
+          {
+            name = "once_cell";
+            packageId = "once_cell";
+          }
+          {
+            name = "tracy-client-sys";
+            packageId = "tracy-client-sys";
+            rename = "sys";
+            usesDefaultFeatures = false;
+          }
+        ];
+        features = {
+          "broadcast" = [ "sys/broadcast" ];
+          "callstack-inlines" = [ "sys/callstack-inlines" ];
+          "code-transfer" = [ "sys/code-transfer" ];
+          "context-switch-tracing" = [ "sys/context-switch-tracing" ];
+          "default" = [ "enable" "system-tracing" "context-switch-tracing" "sampling" "code-transfer" "broadcast" "callstack-inlines" ];
+          "delayed-init" = [ "sys/delayed-init" ];
+          "enable" = [ "sys/enable" ];
+          "fibers" = [ "sys/fibers" ];
+          "flush-on-exit" = [ "sys/flush-on-exit" ];
+          "manual-lifetime" = [ "sys/manual-lifetime" ];
+          "ondemand" = [ "sys/ondemand" ];
+          "only-ipv4" = [ "sys/only-ipv4" ];
+          "only-localhost" = [ "sys/only-localhost" ];
+          "sampling" = [ "sys/sampling" ];
+          "system-tracing" = [ "sys/system-tracing" ];
+          "timer-fallback" = [ "sys/timer-fallback" ];
+        };
+        resolvedDefaultFeatures = [ "broadcast" "callstack-inlines" "code-transfer" "context-switch-tracing" "enable" "flush-on-exit" "sampling" "system-tracing" ];
+      };
+      "tracy-client-sys" = rec {
+        crateName = "tracy-client-sys";
+        version = "0.22.2";
+        edition = "2021";
+        sha256 = "10h8msq85b7rhfg2vg22g2iizbk4c6fcq0jiadad37gs1mhls44x";
+        libName = "tracy_client_sys";
+        authors = [
+          "Simonas Kazlauskas <tracy-client-sys@kazlauskas.me>"
+        ];
+        buildDependencies = [
+          {
+            name = "cc";
+            packageId = "cc";
+            usesDefaultFeatures = false;
+          }
+        ];
+        features = {
+          "default" = [ "enable" "system-tracing" "context-switch-tracing" "sampling" "code-transfer" "broadcast" "callstack-inlines" ];
+          "manual-lifetime" = [ "delayed-init" ];
+        };
+        resolvedDefaultFeatures = [ "broadcast" "callstack-inlines" "code-transfer" "context-switch-tracing" "enable" "flush-on-exit" "sampling" "system-tracing" ];
+      };
       "try-lock" = rec {
         crateName = "try-lock";
         version = "0.2.5";
         edition = "2015";
         sha256 = "0jqijrrvm1pyq34zn1jmy2vihd4jcrjlvsh4alkjahhssjnsn8g4";
+        libName = "try_lock";
         authors = [
           "Sean McArthur <sean@seanmonstar.com>"
         ];
 
       };
+      "trybuild" = rec {
+        crateName = "trybuild";
+        version = "1.0.99";
+        edition = "2021";
+        sha256 = "1s4i2hpyb66676xkg6b6fxm2qdsawj5lfad8ds68vgn46q6sayi0";
+        authors = [
+          "David Tolnay <dtolnay@gmail.com>"
+        ];
+        dependencies = [
+          {
+            name = "glob";
+            packageId = "glob";
+          }
+          {
+            name = "serde";
+            packageId = "serde";
+          }
+          {
+            name = "serde_derive";
+            packageId = "serde_derive";
+          }
+          {
+            name = "serde_json";
+            packageId = "serde_json";
+          }
+          {
+            name = "termcolor";
+            packageId = "termcolor";
+          }
+          {
+            name = "toml";
+            packageId = "toml 0.8.19";
+          }
+        ];
+        features = {
+          "diff" = [ "dissimilar" ];
+          "dissimilar" = [ "dep:dissimilar" ];
+        };
+      };
       "tvix-build" = rec {
         crateName = "tvix-build";
         version = "0.1.0";
@@ -12679,12 +14639,8 @@ rec {
             requiredFeatures = [ ];
           }
         ];
-        # We can't filter paths with references in Nix 2.4
-        # See https://github.com/NixOS/nix/issues/5410
-        src =
-          if ((lib.versionOlder builtins.nixVersion "2.4pre20211007") || (lib.versionOlder "2.5" builtins.nixVersion))
-          then lib.cleanSourceWith { filter = sourceFilter; src = ./build; }
-          else ./build;
+        src = lib.cleanSourceWith { filter = sourceFilter; src = ./build; };
+        libName = "tvix_build";
         dependencies = [
           {
             name = "bytes";
@@ -12697,7 +14653,11 @@ rec {
           }
           {
             name = "itertools";
-            packageId = "itertools 0.12.0";
+            packageId = "itertools 0.12.1";
+          }
+          {
+            name = "mimalloc";
+            packageId = "mimalloc";
           }
           {
             name = "prost";
@@ -12714,7 +14674,7 @@ rec {
           {
             name = "tokio-listener";
             packageId = "tokio-listener";
-            features = [ "tonic011" ];
+            features = [ "tonic012" ];
           }
           {
             name = "tonic";
@@ -12731,14 +14691,14 @@ rec {
             packageId = "tracing";
           }
           {
-            name = "tracing-subscriber";
-            packageId = "tracing-subscriber";
-          }
-          {
             name = "tvix-castore";
             packageId = "tvix-castore";
           }
           {
+            name = "tvix-tracing";
+            packageId = "tvix-tracing";
+          }
+          {
             name = "url";
             packageId = "url";
           }
@@ -12760,7 +14720,7 @@ rec {
           }
         ];
         features = {
-          "tonic-reflection" = [ "dep:tonic-reflection" ];
+          "tonic-reflection" = [ "dep:tonic-reflection" "tvix-castore/tonic-reflection" ];
         };
         resolvedDefaultFeatures = [ "default" "tonic-reflection" ];
       };
@@ -12768,12 +14728,8 @@ rec {
         crateName = "tvix-castore";
         version = "0.1.0";
         edition = "2021";
-        # We can't filter paths with references in Nix 2.4
-        # See https://github.com/NixOS/nix/issues/5410
-        src =
-          if ((lib.versionOlder builtins.nixVersion "2.4pre20211007") || (lib.versionOlder "2.5" builtins.nixVersion))
-          then lib.cleanSourceWith { filter = sourceFilter; src = ./castore; }
-          else ./castore;
+        src = lib.cleanSourceWith { filter = sourceFilter; src = ./castore; };
+        libName = "tvix_castore";
         dependencies = [
           {
             name = "async-compression";
@@ -12815,6 +14771,10 @@ rec {
             packageId = "digest";
           }
           {
+            name = "erased-serde";
+            packageId = "erased-serde";
+          }
+          {
             name = "fastcdc";
             packageId = "fastcdc";
             features = [ "tokio" ];
@@ -12829,6 +14789,10 @@ rec {
             packageId = "futures";
           }
           {
+            name = "hyper-util";
+            packageId = "hyper-util";
+          }
+          {
             name = "lazy_static";
             packageId = "lazy_static";
           }
@@ -12844,7 +14808,7 @@ rec {
           }
           {
             name = "parking_lot";
-            packageId = "parking_lot 0.12.2";
+            packageId = "parking_lot 0.12.3";
           }
           {
             name = "petgraph";
@@ -12859,6 +14823,10 @@ rec {
             packageId = "prost";
           }
           {
+            name = "redb";
+            packageId = "redb";
+          }
+          {
             name = "serde";
             packageId = "serde";
             features = [ "derive" ];
@@ -12868,6 +14836,10 @@ rec {
             packageId = "serde_qs";
           }
           {
+            name = "serde_tagged";
+            packageId = "serde_tagged";
+          }
+          {
             name = "serde_with";
             packageId = "serde_with";
           }
@@ -12880,6 +14852,11 @@ rec {
             packageId = "thiserror";
           }
           {
+            name = "threadpool";
+            packageId = "threadpool";
+            optional = true;
+          }
+          {
             name = "tokio";
             packageId = "tokio";
             features = [ "fs" "macros" "net" "rt" "rt-multi-thread" "signal" ];
@@ -12916,6 +14893,15 @@ rec {
             packageId = "tracing";
           }
           {
+            name = "tracing-indicatif";
+            packageId = "tracing-indicatif";
+          }
+          {
+            name = "tvix-tracing";
+            packageId = "tvix-tracing";
+            features = [ "tonic" ];
+          }
+          {
             name = "url";
             packageId = "url";
           }
@@ -12986,6 +14972,10 @@ rec {
             packageId = "rstest_reuse";
           }
           {
+            name = "serde_json";
+            packageId = "serde_json";
+          }
+          {
             name = "tempfile";
             packageId = "tempfile";
           }
@@ -13000,7 +14990,8 @@ rec {
         ];
         features = {
           "cloud" = [ "dep:bigtable_rs" "object_store/aws" "object_store/azure" "object_store/gcp" ];
-          "fs" = [ "dep:libc" "dep:fuse-backend-rs" ];
+          "default" = [ "cloud" ];
+          "fs" = [ "dep:fuse-backend-rs" "dep:threadpool" "dep:libc" ];
           "fuse" = [ "fs" ];
           "tonic-reflection" = [ "dep:tonic-reflection" ];
           "virtiofs" = [ "fs" "dep:vhost" "dep:vhost-user-backend" "dep:virtio-queue" "dep:vm-memory" "dep:vmm-sys-util" "dep:virtio-bindings" "fuse-backend-rs?/vhost-user-fs" "fuse-backend-rs?/virtiofs" ];
@@ -13018,12 +15009,8 @@ rec {
             requiredFeatures = [ ];
           }
         ];
-        # We can't filter paths with references in Nix 2.4
-        # See https://github.com/NixOS/nix/issues/5410
-        src =
-          if ((lib.versionOlder builtins.nixVersion "2.4pre20211007") || (lib.versionOlder "2.5" builtins.nixVersion))
-          then lib.cleanSourceWith { filter = sourceFilter; src = ./cli; }
-          else ./cli;
+        src = lib.cleanSourceWith { filter = sourceFilter; src = ./cli; };
+        libName = "tvix_cli";
         dependencies = [
           {
             name = "bytes";
@@ -13039,14 +15026,34 @@ rec {
             packageId = "dirs";
           }
           {
+            name = "mimalloc";
+            packageId = "mimalloc";
+          }
+          {
             name = "nix-compat";
             packageId = "nix-compat";
           }
           {
+            name = "rnix";
+            packageId = "rnix";
+          }
+          {
+            name = "rowan";
+            packageId = "rowan";
+          }
+          {
+            name = "rustc-hash";
+            packageId = "rustc-hash 2.0.0";
+          }
+          {
             name = "rustyline";
             packageId = "rustyline";
           }
           {
+            name = "smol_str";
+            packageId = "smol_str";
+          }
+          {
             name = "thiserror";
             packageId = "thiserror";
           }
@@ -13057,11 +15064,10 @@ rec {
           {
             name = "tracing";
             packageId = "tracing";
-            features = [ "max_level_trace" "release_max_level_info" ];
           }
           {
-            name = "tracing-subscriber";
-            packageId = "tracing-subscriber";
+            name = "tracing-indicatif";
+            packageId = "tracing-indicatif";
           }
           {
             name = "tvix-build";
@@ -13085,22 +15091,30 @@ rec {
             usesDefaultFeatures = false;
           }
           {
+            name = "tvix-tracing";
+            packageId = "tvix-tracing";
+          }
+          {
             name = "wu-manber";
             packageId = "wu-manber";
           }
         ];
-
+        devDependencies = [
+          {
+            name = "expect-test";
+            packageId = "expect-test";
+          }
+        ];
+        features = {
+          "tracy" = [ "tvix-tracing/tracy" ];
+        };
+        resolvedDefaultFeatures = [ "default" "tracy" ];
       };
       "tvix-eval" = rec {
         crateName = "tvix-eval";
         version = "0.1.0";
         edition = "2021";
-        # We can't filter paths with references in Nix 2.4
-        # See https://github.com/NixOS/nix/issues/5410
-        src =
-          if ((lib.versionOlder builtins.nixVersion "2.4pre20211007") || (lib.versionOlder "2.5" builtins.nixVersion))
-          then lib.cleanSourceWith { filter = sourceFilter; src = ./eval; }
-          else ./eval;
+        src = lib.cleanSourceWith { filter = sourceFilter; src = ./eval; };
         libName = "tvix_eval";
         dependencies = [
           {
@@ -13134,13 +15148,8 @@ rec {
             usesDefaultFeatures = false;
           }
           {
-            name = "imbl";
-            packageId = "imbl";
-            features = [ "serde" ];
-          }
-          {
             name = "itertools";
-            packageId = "itertools 0.12.0";
+            packageId = "itertools 0.12.1";
           }
           {
             name = "lazy_static";
@@ -13156,6 +15165,10 @@ rec {
             packageId = "md-5";
           }
           {
+            name = "nohash-hasher";
+            packageId = "nohash-hasher";
+          }
+          {
             name = "os_str_bytes";
             packageId = "os_str_bytes";
             features = [ "conversions" ];
@@ -13184,6 +15197,10 @@ rec {
             packageId = "rowan";
           }
           {
+            name = "rustc-hash";
+            packageId = "rustc-hash 2.0.0";
+          }
+          {
             name = "serde";
             packageId = "serde";
             features = [ "rc" "derive" ];
@@ -13215,7 +15232,7 @@ rec {
           }
           {
             name = "toml";
-            packageId = "toml";
+            packageId = "toml 0.6.0";
           }
           {
             name = "tvix-eval-builtin-macros";
@@ -13223,8 +15240,8 @@ rec {
             rename = "builtin-macros";
           }
           {
-            name = "xml-rs";
-            packageId = "xml-rs";
+            name = "vu128";
+            packageId = "vu128";
           }
         ];
         devDependencies = [
@@ -13234,7 +15251,11 @@ rec {
           }
           {
             name = "itertools";
-            packageId = "itertools 0.12.0";
+            packageId = "itertools 0.12.1";
+          }
+          {
+            name = "mimalloc";
+            packageId = "mimalloc";
           }
           {
             name = "pretty_assertions";
@@ -13250,24 +15271,20 @@ rec {
           }
         ];
         features = {
-          "arbitrary" = [ "proptest" "test-strategy" "imbl/proptest" ];
+          "arbitrary" = [ "proptest" "test-strategy" ];
           "default" = [ "impure" "arbitrary" "nix_tests" ];
           "proptest" = [ "dep:proptest" ];
           "test-strategy" = [ "dep:test-strategy" ];
         };
-        resolvedDefaultFeatures = [ "arbitrary" "default" "impure" "nix_tests" "proptest" "test-strategy" ];
+        resolvedDefaultFeatures = [ "arbitrary" "default" "impure" "nix_tests" "no_leak" "proptest" "test-strategy" ];
       };
       "tvix-eval-builtin-macros" = rec {
         crateName = "tvix-eval-builtin-macros";
         version = "0.0.1";
         edition = "2021";
-        # We can't filter paths with references in Nix 2.4
-        # See https://github.com/NixOS/nix/issues/5410
-        src =
-          if ((lib.versionOlder builtins.nixVersion "2.4pre20211007") || (lib.versionOlder "2.5" builtins.nixVersion))
-          then lib.cleanSourceWith { filter = sourceFilter; src = ./eval/builtin-macros; }
-          else ./eval/builtin-macros;
+        src = lib.cleanSourceWith { filter = sourceFilter; src = ./eval/builtin-macros; };
         procMacro = true;
+        libName = "tvix_eval_builtin_macros";
         authors = [
           "Griffin Smith <root@gws.fyi>"
         ];
@@ -13298,12 +15315,8 @@ rec {
         crateName = "tvix-glue";
         version = "0.1.0";
         edition = "2021";
-        # We can't filter paths with references in Nix 2.4
-        # See https://github.com/NixOS/nix/issues/5410
-        src =
-          if ((lib.versionOlder builtins.nixVersion "2.4pre20211007") || (lib.versionOlder "2.5" builtins.nixVersion))
-          then lib.cleanSourceWith { filter = sourceFilter; src = ./glue; }
-          else ./glue;
+        src = lib.cleanSourceWith { filter = sourceFilter; src = ./glue; };
+        libName = "tvix_glue";
         dependencies = [
           {
             name = "async-compression";
@@ -13319,6 +15332,10 @@ rec {
             packageId = "bytes";
           }
           {
+            name = "clap";
+            packageId = "clap";
+          }
+          {
             name = "data-encoding";
             packageId = "data-encoding";
           }
@@ -13386,6 +15403,10 @@ rec {
             packageId = "tracing";
           }
           {
+            name = "tracing-indicatif";
+            packageId = "tracing-indicatif";
+          }
+          {
             name = "tvix-build";
             packageId = "tvix-build";
             usesDefaultFeatures = false;
@@ -13404,6 +15425,10 @@ rec {
             usesDefaultFeatures = false;
           }
           {
+            name = "tvix-tracing";
+            packageId = "tvix-tracing";
+          }
+          {
             name = "url";
             packageId = "url";
           }
@@ -13431,6 +15456,10 @@ rec {
             packageId = "lazy_static";
           }
           {
+            name = "mimalloc";
+            packageId = "mimalloc";
+          }
+          {
             name = "nix";
             packageId = "nix 0.27.1";
             features = [ "fs" ];
@@ -13447,6 +15476,10 @@ rec {
             name = "tempfile";
             packageId = "tempfile";
           }
+          {
+            name = "tokio-test";
+            packageId = "tokio-test";
+          }
         ];
         features = {
           "default" = [ "nix_tests" ];
@@ -13457,12 +15490,8 @@ rec {
         crateName = "tvix-serde";
         version = "0.1.0";
         edition = "2021";
-        # We can't filter paths with references in Nix 2.4
-        # See https://github.com/NixOS/nix/issues/5410
-        src =
-          if ((lib.versionOlder builtins.nixVersion "2.4pre20211007") || (lib.versionOlder "2.5" builtins.nixVersion))
-          then lib.cleanSourceWith { filter = sourceFilter; src = ./serde; }
-          else ./serde;
+        src = lib.cleanSourceWith { filter = sourceFilter; src = ./serde; };
+        libName = "tvix_serde";
         dependencies = [
           {
             name = "bstr";
@@ -13492,12 +15521,8 @@ rec {
             requiredFeatures = [ ];
           }
         ];
-        # We can't filter paths with references in Nix 2.4
-        # See https://github.com/NixOS/nix/issues/5410
-        src =
-          if ((lib.versionOlder builtins.nixVersion "2.4pre20211007") || (lib.versionOlder "2.5" builtins.nixVersion))
-          then lib.cleanSourceWith { filter = sourceFilter; src = ./store; }
-          else ./store;
+        src = lib.cleanSourceWith { filter = sourceFilter; src = ./store; };
+        libName = "tvix_store";
         dependencies = [
           {
             name = "anyhow";
@@ -13548,6 +15573,10 @@ rec {
             packageId = "futures";
           }
           {
+            name = "hyper-util";
+            packageId = "hyper-util";
+          }
+          {
             name = "lazy_static";
             packageId = "lazy_static";
           }
@@ -13556,29 +15585,17 @@ rec {
             packageId = "lru";
           }
           {
+            name = "mimalloc";
+            packageId = "mimalloc";
+          }
+          {
             name = "nix-compat";
             packageId = "nix-compat";
             features = [ "async" ];
           }
           {
-            name = "opentelemetry";
-            packageId = "opentelemetry";
-            optional = true;
-          }
-          {
-            name = "opentelemetry-otlp";
-            packageId = "opentelemetry-otlp";
-            optional = true;
-          }
-          {
-            name = "opentelemetry_sdk";
-            packageId = "opentelemetry_sdk";
-            optional = true;
-            features = [ "rt-tokio" ];
-          }
-          {
             name = "parking_lot";
-            packageId = "parking_lot 0.12.2";
+            packageId = "parking_lot 0.12.3";
           }
           {
             name = "pin-project-lite";
@@ -13589,12 +15606,20 @@ rec {
             packageId = "prost";
           }
           {
+            name = "redb";
+            packageId = "redb";
+          }
+          {
             name = "reqwest";
             packageId = "reqwest";
             usesDefaultFeatures = false;
             features = [ "rustls-tls-native-roots" "stream" ];
           }
           {
+            name = "reqwest-middleware";
+            packageId = "reqwest-middleware";
+          }
+          {
             name = "serde";
             packageId = "serde";
             features = [ "derive" ];
@@ -13631,7 +15656,7 @@ rec {
           {
             name = "tokio-listener";
             packageId = "tokio-listener";
-            features = [ "tonic011" ];
+            features = [ "clap" "multi-listener" "sd_listen" "tonic012" ];
           }
           {
             name = "tokio-stream";
@@ -13644,11 +15669,21 @@ rec {
             features = [ "io" "io-util" "compat" ];
           }
           {
+            name = "toml";
+            packageId = "toml 0.8.19";
+            optional = true;
+          }
+          {
             name = "tonic";
             packageId = "tonic";
             features = [ "tls" "tls-roots" ];
           }
           {
+            name = "tonic-health";
+            packageId = "tonic-health";
+            usesDefaultFeatures = false;
+          }
+          {
             name = "tonic-reflection";
             packageId = "tonic-reflection";
             optional = true;
@@ -13658,23 +15693,28 @@ rec {
             packageId = "tower";
           }
           {
+            name = "tower-http";
+            packageId = "tower-http";
+            features = [ "trace" ];
+          }
+          {
             name = "tracing";
             packageId = "tracing";
           }
           {
-            name = "tracing-opentelemetry";
-            packageId = "tracing-opentelemetry";
-          }
-          {
-            name = "tracing-subscriber";
-            packageId = "tracing-subscriber";
-            features = [ "env-filter" ];
+            name = "tracing-indicatif";
+            packageId = "tracing-indicatif";
           }
           {
             name = "tvix-castore";
             packageId = "tvix-castore";
           }
           {
+            name = "tvix-tracing";
+            packageId = "tvix-tracing";
+            features = [ "tonic" "reqwest" ];
+          }
+          {
             name = "url";
             packageId = "url";
           }
@@ -13719,11 +15759,125 @@ rec {
           "cloud" = [ "dep:bigtable_rs" "tvix-castore/cloud" ];
           "default" = [ "cloud" "fuse" "otlp" "tonic-reflection" ];
           "fuse" = [ "tvix-castore/fuse" ];
-          "otlp" = [ "dep:opentelemetry" "dep:opentelemetry-otlp" "dep:opentelemetry_sdk" ];
+          "otlp" = [ "tvix-tracing/otlp" ];
+          "toml" = [ "dep:toml" ];
           "tonic-reflection" = [ "dep:tonic-reflection" "tvix-castore/tonic-reflection" ];
+          "tracy" = [ "tvix-tracing/tracy" ];
           "virtiofs" = [ "tvix-castore/virtiofs" ];
+          "xp-store-composition" = [ "toml" ];
         };
-        resolvedDefaultFeatures = [ "cloud" "default" "fuse" "integration" "otlp" "tonic-reflection" "virtiofs" ];
+        resolvedDefaultFeatures = [ "cloud" "default" "fuse" "integration" "otlp" "toml" "tonic-reflection" "tracy" "virtiofs" "xp-store-composition" ];
+      };
+      "tvix-tracing" = rec {
+        crateName = "tvix-tracing";
+        version = "0.1.0";
+        edition = "2021";
+        src = lib.cleanSourceWith { filter = sourceFilter; src = ./tracing; };
+        libName = "tvix_tracing";
+        dependencies = [
+          {
+            name = "axum";
+            packageId = "axum";
+            optional = true;
+          }
+          {
+            name = "http";
+            packageId = "http";
+            optional = true;
+          }
+          {
+            name = "indicatif";
+            packageId = "indicatif";
+          }
+          {
+            name = "lazy_static";
+            packageId = "lazy_static";
+          }
+          {
+            name = "opentelemetry";
+            packageId = "opentelemetry 0.24.0";
+            optional = true;
+          }
+          {
+            name = "opentelemetry-http";
+            packageId = "opentelemetry-http";
+            optional = true;
+          }
+          {
+            name = "opentelemetry-otlp";
+            packageId = "opentelemetry-otlp";
+            optional = true;
+          }
+          {
+            name = "opentelemetry_sdk";
+            packageId = "opentelemetry_sdk 0.24.1";
+            optional = true;
+            features = [ "rt-tokio" ];
+          }
+          {
+            name = "reqwest-tracing";
+            packageId = "reqwest-tracing";
+            optional = true;
+            usesDefaultFeatures = false;
+          }
+          {
+            name = "thiserror";
+            packageId = "thiserror";
+          }
+          {
+            name = "tokio";
+            packageId = "tokio";
+            features = [ "sync" "rt" ];
+          }
+          {
+            name = "tonic";
+            packageId = "tonic";
+            optional = true;
+          }
+          {
+            name = "tracing";
+            packageId = "tracing";
+            features = [ "max_level_trace" "release_max_level_debug" ];
+          }
+          {
+            name = "tracing-indicatif";
+            packageId = "tracing-indicatif";
+          }
+          {
+            name = "tracing-opentelemetry";
+            packageId = "tracing-opentelemetry 0.25.0";
+            optional = true;
+          }
+          {
+            name = "tracing-subscriber";
+            packageId = "tracing-subscriber";
+            features = [ "env-filter" ];
+          }
+          {
+            name = "tracing-tracy";
+            packageId = "tracing-tracy";
+            optional = true;
+            features = [ "flush-on-exit" ];
+          }
+        ];
+        features = {
+          "axum" = [ "dep:axum" ];
+          "otlp" = [ "dep:tracing-opentelemetry" "dep:opentelemetry" "dep:opentelemetry-otlp" "dep:opentelemetry_sdk" "dep:opentelemetry-http" "reqwest-tracing?/opentelemetry_0_22" ];
+          "reqwest" = [ "dep:reqwest-tracing" ];
+          "tonic" = [ "dep:tonic" "dep:http" ];
+          "tracy" = [ "dep:tracing-tracy" ];
+        };
+        resolvedDefaultFeatures = [ "axum" "default" "otlp" "reqwest" "tonic" "tracy" ];
+      };
+      "typeid" = rec {
+        crateName = "typeid";
+        version = "1.0.0";
+        edition = "2021";
+        sha256 = "1ky97g0dwzdhmbcwzy098biqh26vhlc98l5x6zy44yhyk7687785";
+        authors = [
+          "David Tolnay <dtolnay@gmail.com>"
+        ];
+
       };
       "typenum" = rec {
         crateName = "typenum";
@@ -13784,6 +15938,7 @@ rec {
         edition = "2021";
         sha256 = "1cabnvm526bcgh1sh34js5ils0gz4xwlgvwhm992acdr8xzqhwxc";
         procMacro = true;
+        libName = "typetag_impl";
         authors = [
           "David Tolnay <dtolnay@gmail.com>"
         ];
@@ -13798,7 +15953,7 @@ rec {
           }
           {
             name = "syn";
-            packageId = "syn 2.0.48";
+            packageId = "syn 2.0.76";
             features = [ "full" ];
           }
         ];
@@ -13851,6 +16006,7 @@ rec {
         version = "1.0.12";
         edition = "2018";
         sha256 = "0jzf1znfpb2gx8nr8mvmyqs1crnv79l57nxnbiszc7xf7ynbjm1k";
+        libName = "unicode_ident";
         authors = [
           "David Tolnay <dtolnay@gmail.com>"
         ];
@@ -13861,6 +16017,7 @@ rec {
         version = "0.1.22";
         edition = "2018";
         sha256 = "08d95g7b1irc578b2iyhzv4xhsa4pfvwsqxcl9lbcpabzkq16msw";
+        libName = "unicode_normalization";
         authors = [
           "kwantam <kwantam@gmail.com>"
           "Manish Goregaokar <manishsmail@gmail.com>"
@@ -13882,6 +16039,7 @@ rec {
         version = "1.10.1";
         edition = "2018";
         sha256 = "0dky2hm5k51xy11hc3nk85p533rvghd462b6i0c532b7hl4j9mhx";
+        libName = "unicode_segmentation";
         authors = [
           "kwantam <kwantam@gmail.com>"
           "Manish Goregaokar <manishsmail@gmail.com>"
@@ -13893,6 +16051,7 @@ rec {
         version = "0.1.11";
         edition = "2015";
         sha256 = "11ds4ydhg8g7l06rlmh712q41qsrd0j0h00n1jm74kww3kqk65z5";
+        libName = "unicode_width";
         authors = [
           "kwantam <kwantam@gmail.com>"
           "Manish Goregaokar <manishsmail@gmail.com>"
@@ -13917,9 +16076,9 @@ rec {
       };
       "url" = rec {
         crateName = "url";
-        version = "2.5.0";
+        version = "2.5.2";
         edition = "2018";
-        sha256 = "0cs65961miawncdg2z20171w0vqrmraswv2ihdpd8lxp7cp31rii";
+        sha256 = "0v2dx50mx7xzl9454cl5qmpjnhkbahmn59gd3apyipbgyyylsy12";
         authors = [
           "The rust-url developers"
         ];
@@ -14086,6 +16245,7 @@ rec {
         version = "0.8.0";
         edition = "2018";
         sha256 = "00s33wy8cj2i8b4hlxn7wd8zm1fpaa5kjhzv77b3khsavf8pn8wz";
+        libName = "vhost_user_backend";
         authors = [
           "The Cloud Hypervisor Authors"
         ];
@@ -14140,6 +16300,7 @@ rec {
         version = "0.1.0";
         edition = "2018";
         sha256 = "0sxxhhmz1r4s4q5pd2lykswcv9qk05fmpwc5xlb8aj45h8bi5x9z";
+        libName = "virtio_bindings";
         authors = [
           "Sergio Lopez <slp@redhat.com>"
         ];
@@ -14150,6 +16311,7 @@ rec {
         version = "0.2.2";
         edition = "2021";
         sha256 = "11mfm9brqgwpfg0izsgc4n7xkqwxk5ad03ivslq0r88j50dwp2w7";
+        libName = "virtio_bindings";
         authors = [
           "Sergio Lopez <slp@redhat.com>"
         ];
@@ -14160,6 +16322,7 @@ rec {
         version = "0.7.1";
         edition = "2021";
         sha256 = "1gbppbapj7c0vyca88vl34cx4sp2cy9yg0v6bvyd5h11rhmixa1v";
+        libName = "virtio_queue";
         authors = [
           "The Chromium OS Authors"
         ];
@@ -14195,6 +16358,7 @@ rec {
         version = "0.10.0";
         edition = "2021";
         sha256 = "0z423a8i4s3addq4yjad4ar5l6qwarjwdn94lismbd0mcqv712k8";
+        libName = "vm_memory";
         authors = [
           "Liu Jiang <gerry@linux.alibaba.com>"
         ];
@@ -14226,6 +16390,7 @@ rec {
         version = "0.11.2";
         edition = "2021";
         sha256 = "0a9azxk6wsahwkggshbdga4jdryzfw6j5r21f11gf50j4f2b1ds8";
+        libName = "vmm_sys_util";
         authors = [
           "Intel Virtualization Team <vmm-maintainers@intel.com>"
         ];
@@ -14246,29 +16411,113 @@ rec {
           "with-serde" = [ "serde" "serde_derive" ];
         };
       };
-      "wait-timeout" = rec {
-        crateName = "wait-timeout";
-        version = "0.2.0";
-        edition = "2015";
-        crateBin = [ ];
-        sha256 = "1xpkk0j5l9pfmjfh1pi0i89invlavfrd9av5xp0zhxgb29dhy84z";
+      "vt100" = rec {
+        crateName = "vt100";
+        version = "0.15.2";
+        edition = "2021";
+        sha256 = "1pklc8y984axmxr0cd363srr2d27wd5rj15xlcmkjznvy0xqdkc4";
         authors = [
-          "Alex Crichton <alex@alexcrichton.com>"
+          "Jesse Luehrs <doy@tozt.net>"
         ];
         dependencies = [
           {
-            name = "libc";
-            packageId = "libc";
-            target = { target, features }: (target."unix" or false);
+            name = "itoa";
+            packageId = "itoa";
+          }
+          {
+            name = "log";
+            packageId = "log";
+          }
+          {
+            name = "unicode-width";
+            packageId = "unicode-width";
+          }
+          {
+            name = "vte";
+            packageId = "vte";
+          }
+        ];
+        devDependencies = [
+          {
+            name = "vte";
+            packageId = "vte";
+          }
+        ];
+
+      };
+      "vte" = rec {
+        crateName = "vte";
+        version = "0.11.1";
+        edition = "2021";
+        sha256 = "15r1ff4j8ndqj9vsyil3wqwxhhl7jsz5g58f31n0h1wlpxgjn0pm";
+        authors = [
+          "Joe Wilm <joe@jwilm.com>"
+          "Christian Duerr <contact@christianduerr.com>"
+        ];
+        dependencies = [
+          {
+            name = "arrayvec";
+            packageId = "arrayvec";
+            optional = true;
+            usesDefaultFeatures = false;
+          }
+          {
+            name = "utf8parse";
+            packageId = "utf8parse";
+          }
+          {
+            name = "vte_generate_state_changes";
+            packageId = "vte_generate_state_changes";
+          }
+        ];
+        features = {
+          "ansi" = [ "log" ];
+          "arrayvec" = [ "dep:arrayvec" ];
+          "default" = [ "no_std" ];
+          "log" = [ "dep:log" ];
+          "nightly" = [ "utf8parse/nightly" ];
+          "no_std" = [ "arrayvec" ];
+          "serde" = [ "dep:serde" ];
+        };
+        resolvedDefaultFeatures = [ "arrayvec" "default" "no_std" ];
+      };
+      "vte_generate_state_changes" = rec {
+        crateName = "vte_generate_state_changes";
+        version = "0.1.1";
+        edition = "2018";
+        sha256 = "1zs5q766q7jmc80c5c80gpzy4qpg5lnydf94mgdzrpy7h5q82myj";
+        procMacro = true;
+        authors = [
+          "Christian Duerr <contact@christianduerr.com>"
+        ];
+        dependencies = [
+          {
+            name = "proc-macro2";
+            packageId = "proc-macro2";
+          }
+          {
+            name = "quote";
+            packageId = "quote";
           }
         ];
 
       };
+      "vu128" = rec {
+        crateName = "vu128";
+        version = "1.1.0";
+        edition = "2018";
+        sha256 = "1pczgy26c0lsri1ddrx5wkgn0rcq4da04pqya5rl6vrwfnys73di";
+        libPath = "vu128/vu128.rs";
+        authors = [
+          "John Millikin <john@john-millikin.com>"
+        ];
+
+      };
       "walkdir" = rec {
         crateName = "walkdir";
-        version = "2.4.0";
+        version = "2.5.0";
         edition = "2018";
-        sha256 = "1vjl9fmfc4v8k9ald23qrpcbyb8dl1ynyq8d516cm537r1yqa7fp";
+        sha256 = "0jsy7a710qv8gld5957ybrnc07gavppp963gs32xk4ag8130jy99";
         authors = [
           "Andrew Gallant <jamslam@gmail.com>"
         ];
@@ -14323,6 +16572,7 @@ rec {
         version = "0.2.90";
         edition = "2018";
         sha256 = "01jlal3mynqwvqx4acrdnr9bvsdczaz2sy8lmmzmqh81lab348mi";
+        libName = "wasm_bindgen";
         authors = [
           "The wasm-bindgen Developers"
         ];
@@ -14354,6 +16604,7 @@ rec {
         version = "0.2.90";
         edition = "2018";
         sha256 = "1kcxml9762zjdrn0h0n0qxfg1n7z1f577jcc5yimi3a0cddr7p7w";
+        libName = "wasm_bindgen_backend";
         authors = [
           "The wasm-bindgen Developers"
         ];
@@ -14380,7 +16631,7 @@ rec {
           }
           {
             name = "syn";
-            packageId = "syn 2.0.48";
+            packageId = "syn 2.0.76";
             features = [ "full" ];
           }
           {
@@ -14398,6 +16649,7 @@ rec {
         version = "0.4.40";
         edition = "2018";
         sha256 = "0qf4bzlinyg0s4b38fhzdi1cqdd7rgrywqdjr3ngmgc6xcm07qmx";
+        libName = "wasm_bindgen_futures";
         authors = [
           "The wasm-bindgen Developers"
         ];
@@ -14432,6 +16684,7 @@ rec {
         edition = "2018";
         sha256 = "16d980bql7y5krfqlmcr8mk1q4mrm0rmb0a99j92im5jc62j6k1y";
         procMacro = true;
+        libName = "wasm_bindgen_macro";
         authors = [
           "The wasm-bindgen Developers"
         ];
@@ -14456,6 +16709,7 @@ rec {
         version = "0.2.90";
         edition = "2018";
         sha256 = "19r5bsyjw0fvim7dsj8pbwrq8v0ggh845lhfasgavhbdh2vapqds";
+        libName = "wasm_bindgen_macro_support";
         authors = [
           "The wasm-bindgen Developers"
         ];
@@ -14470,7 +16724,7 @@ rec {
           }
           {
             name = "syn";
-            packageId = "syn 2.0.48";
+            packageId = "syn 2.0.76";
             features = [ "visit" "full" ];
           }
           {
@@ -14494,6 +16748,7 @@ rec {
         edition = "2018";
         links = "wasm_bindgen";
         sha256 = "0av0m0shdg1jxhf66ymjbq03m0qb7ypm297glndm7mri3hxl34ad";
+        libName = "wasm_bindgen_shared";
         authors = [
           "The wasm-bindgen Developers"
         ];
@@ -14501,9 +16756,10 @@ rec {
       };
       "wasm-streams" = rec {
         crateName = "wasm-streams";
-        version = "0.3.0";
+        version = "0.4.0";
         edition = "2021";
-        sha256 = "1iqa4kmhbsjj8k4q15i1x0x4p3xda0dhbg7zw51mydr4g129sq5l";
+        sha256 = "0ad17c59xb8fffsnbrqbyqz93hb66nzxhizpii31icb31g4w8pdn";
+        libName = "wasm_streams";
         type = [ "cdylib" "rlib" ];
         authors = [
           "Mattias Buelens <mattias@buelens.com>"
@@ -14529,7 +16785,7 @@ rec {
           {
             name = "web-sys";
             packageId = "web-sys";
-            features = [ "AbortSignal" ];
+            features = [ "AbortSignal" "QueuingStrategy" "ReadableStream" "ReadableStreamType" "ReadableWritablePair" "ReadableStreamByobReader" "ReadableStreamReaderMode" "ReadableStreamReadResult" "ReadableStreamByobRequest" "ReadableStreamDefaultReader" "ReadableByteStreamController" "ReadableStreamGetReaderOptions" "ReadableStreamDefaultController" "StreamPipeOptions" "TransformStream" "TransformStreamDefaultController" "Transformer" "UnderlyingSink" "UnderlyingSource" "WritableStream" "WritableStreamDefaultController" "WritableStreamDefaultWriter" ];
           }
         ];
         devDependencies = [
@@ -14546,6 +16802,7 @@ rec {
         version = "0.3.67";
         edition = "2018";
         sha256 = "1vfjjj3i49gy8bh8znnqhak1hx7xj9c2a3jzc0wpmgp0nqrj7kaq";
+        libName = "web_sys";
         authors = [
           "The wasm-bindgen Developers"
         ];
@@ -15008,13 +17265,14 @@ rec {
           "XrViewerPose" = [ "XrPose" ];
           "XrWebGlLayer" = [ "EventTarget" "XrLayer" ];
         };
-        resolvedDefaultFeatures = [ "AbortController" "AbortSignal" "Blob" "BlobPropertyBag" "CanvasRenderingContext2d" "Document" "DomRect" "DomRectReadOnly" "Element" "Event" "EventTarget" "File" "FormData" "Headers" "HtmlCanvasElement" "HtmlElement" "MessageEvent" "Node" "ReadableStream" "Request" "RequestCredentials" "RequestInit" "RequestMode" "Response" "ServiceWorkerGlobalScope" "Window" "Worker" "WorkerGlobalScope" ];
+        resolvedDefaultFeatures = [ "AbortController" "AbortSignal" "Blob" "BlobPropertyBag" "CanvasRenderingContext2d" "Document" "DomRect" "DomRectReadOnly" "Element" "Event" "EventTarget" "File" "FormData" "Headers" "HtmlCanvasElement" "HtmlElement" "MessageEvent" "Node" "QueuingStrategy" "ReadableByteStreamController" "ReadableStream" "ReadableStreamByobReader" "ReadableStreamByobRequest" "ReadableStreamDefaultController" "ReadableStreamDefaultReader" "ReadableStreamGetReaderOptions" "ReadableStreamReadResult" "ReadableStreamReaderMode" "ReadableStreamType" "ReadableWritablePair" "Request" "RequestCredentials" "RequestInit" "RequestMode" "Response" "ServiceWorkerGlobalScope" "StreamPipeOptions" "TransformStream" "TransformStreamDefaultController" "Transformer" "UnderlyingSink" "UnderlyingSource" "Window" "Worker" "WorkerGlobalScope" "WritableStream" "WritableStreamDefaultController" "WritableStreamDefaultWriter" ];
       };
       "web-time" = rec {
         crateName = "web-time";
         version = "1.1.0";
         edition = "2021";
         sha256 = "1fx05yqx83dhx628wb70fyy10yjfq1jpl20qfqhdkymi13rq0ras";
+        libName = "web_time";
         dependencies = [
           {
             name = "js-sys";
@@ -15032,80 +17290,6 @@ rec {
           "serde" = [ "dep:serde" ];
         };
       };
-      "which 4.4.2" = rec {
-        crateName = "which";
-        version = "4.4.2";
-        edition = "2021";
-        sha256 = "1ixzmx3svsv5hbdvd8vdhd3qwvf6ns8jdpif1wmwsy10k90j9fl7";
-        authors = [
-          "Harry Fei <tiziyuanfang@gmail.com>"
-        ];
-        dependencies = [
-          {
-            name = "either";
-            packageId = "either";
-          }
-          {
-            name = "home";
-            packageId = "home";
-            target = { target, features }: ((target."windows" or false) || (target."unix" or false) || ("redox" == target."os" or null));
-          }
-          {
-            name = "once_cell";
-            packageId = "once_cell";
-            target = { target, features }: (target."windows" or false);
-          }
-          {
-            name = "rustix";
-            packageId = "rustix";
-            usesDefaultFeatures = false;
-            features = [ "fs" "std" ];
-          }
-        ];
-        features = {
-          "regex" = [ "dep:regex" ];
-        };
-      };
-      "which 5.0.0" = rec {
-        crateName = "which";
-        version = "5.0.0";
-        edition = "2021";
-        sha256 = "053fpbczryyn8lcbpkvwl8v2rzld0pr30r5lh1cxv87kjs2ymwwv";
-        authors = [
-          "Harry Fei <tiziyuanfang@gmail.com>"
-        ];
-        dependencies = [
-          {
-            name = "either";
-            packageId = "either";
-          }
-          {
-            name = "home";
-            packageId = "home";
-            target = { target, features }: ((target."windows" or false) || (target."unix" or false) || ("redox" == target."os" or null));
-          }
-          {
-            name = "once_cell";
-            packageId = "once_cell";
-            target = { target, features }: (target."windows" or false);
-          }
-          {
-            name = "rustix";
-            packageId = "rustix";
-            usesDefaultFeatures = false;
-            features = [ "fs" "std" ];
-          }
-          {
-            name = "windows-sys";
-            packageId = "windows-sys 0.48.0";
-            target = { target, features }: (target."windows" or false);
-            features = [ "Win32_Storage_FileSystem" "Win32_Foundation" ];
-          }
-        ];
-        features = {
-          "regex" = [ "dep:regex" ];
-        };
-      };
       "winapi" = rec {
         crateName = "winapi";
         version = "0.3.9";
@@ -15118,12 +17302,12 @@ rec {
           {
             name = "winapi-i686-pc-windows-gnu";
             packageId = "winapi-i686-pc-windows-gnu";
-            target = { target, features }: (pkgs.rust.lib.toRustTarget stdenv.hostPlatform == "i686-pc-windows-gnu");
+            target = { target, features }: (stdenv.hostPlatform.rust.rustcTarget == "i686-pc-windows-gnu");
           }
           {
             name = "winapi-x86_64-pc-windows-gnu";
             packageId = "winapi-x86_64-pc-windows-gnu";
-            target = { target, features }: (pkgs.rust.lib.toRustTarget stdenv.hostPlatform == "x86_64-pc-windows-gnu");
+            target = { target, features }: (stdenv.hostPlatform.rust.rustcTarget == "x86_64-pc-windows-gnu");
           }
         ];
         features = {
@@ -15136,6 +17320,7 @@ rec {
         version = "0.4.0";
         edition = "2015";
         sha256 = "1dmpa6mvcvzz16zg6d5vrfy4bxgg541wxrcip7cnshi06v38ffxc";
+        libName = "winapi_i686_pc_windows_gnu";
         authors = [
           "Peter Atashian <retep998@gmail.com>"
         ];
@@ -15146,6 +17331,7 @@ rec {
         version = "0.1.6";
         edition = "2021";
         sha256 = "15i5lm39wd44004i9d5qspry2cynkrpvwzghr6s2c3dsk28nz7pj";
+        libName = "winapi_util";
         authors = [
           "Andrew Gallant <jamslam@gmail.com>"
         ];
@@ -15164,33 +17350,858 @@ rec {
         version = "0.4.0";
         edition = "2015";
         sha256 = "0gqq64czqb64kskjryj8isp62m2sgvx25yyj3kpc2myh85w24bki";
+        libName = "winapi_x86_64_pc_windows_gnu";
         authors = [
           "Peter Atashian <retep998@gmail.com>"
         ];
 
       };
-      "windows-core" = rec {
+      "windows" = rec {
+        crateName = "windows";
+        version = "0.54.0";
+        edition = "2021";
+        sha256 = "0j8vd8sg2rbln6g3a608qg1a7r2lwxcga78mmxjjin5ybmrfallj";
+        authors = [
+          "Microsoft"
+        ];
+        dependencies = [
+          {
+            name = "windows-core";
+            packageId = "windows-core 0.54.0";
+          }
+          {
+            name = "windows-targets";
+            packageId = "windows-targets 0.52.6";
+          }
+        ];
+        features = {
+          "AI" = [ "Foundation" ];
+          "AI_MachineLearning" = [ "AI" ];
+          "ApplicationModel" = [ "Foundation" ];
+          "ApplicationModel_Activation" = [ "ApplicationModel" ];
+          "ApplicationModel_AppExtensions" = [ "ApplicationModel" ];
+          "ApplicationModel_AppService" = [ "ApplicationModel" ];
+          "ApplicationModel_Appointments" = [ "ApplicationModel" ];
+          "ApplicationModel_Appointments_AppointmentsProvider" = [ "ApplicationModel_Appointments" ];
+          "ApplicationModel_Appointments_DataProvider" = [ "ApplicationModel_Appointments" ];
+          "ApplicationModel_Background" = [ "ApplicationModel" ];
+          "ApplicationModel_Calls" = [ "ApplicationModel" ];
+          "ApplicationModel_Calls_Background" = [ "ApplicationModel_Calls" ];
+          "ApplicationModel_Calls_Provider" = [ "ApplicationModel_Calls" ];
+          "ApplicationModel_Chat" = [ "ApplicationModel" ];
+          "ApplicationModel_CommunicationBlocking" = [ "ApplicationModel" ];
+          "ApplicationModel_Contacts" = [ "ApplicationModel" ];
+          "ApplicationModel_Contacts_DataProvider" = [ "ApplicationModel_Contacts" ];
+          "ApplicationModel_Contacts_Provider" = [ "ApplicationModel_Contacts" ];
+          "ApplicationModel_ConversationalAgent" = [ "ApplicationModel" ];
+          "ApplicationModel_Core" = [ "ApplicationModel" ];
+          "ApplicationModel_DataTransfer" = [ "ApplicationModel" ];
+          "ApplicationModel_DataTransfer_DragDrop" = [ "ApplicationModel_DataTransfer" ];
+          "ApplicationModel_DataTransfer_DragDrop_Core" = [ "ApplicationModel_DataTransfer_DragDrop" ];
+          "ApplicationModel_DataTransfer_ShareTarget" = [ "ApplicationModel_DataTransfer" ];
+          "ApplicationModel_Email" = [ "ApplicationModel" ];
+          "ApplicationModel_Email_DataProvider" = [ "ApplicationModel_Email" ];
+          "ApplicationModel_ExtendedExecution" = [ "ApplicationModel" ];
+          "ApplicationModel_ExtendedExecution_Foreground" = [ "ApplicationModel_ExtendedExecution" ];
+          "ApplicationModel_Holographic" = [ "ApplicationModel" ];
+          "ApplicationModel_LockScreen" = [ "ApplicationModel" ];
+          "ApplicationModel_Payments" = [ "ApplicationModel" ];
+          "ApplicationModel_Payments_Provider" = [ "ApplicationModel_Payments" ];
+          "ApplicationModel_Preview" = [ "ApplicationModel" ];
+          "ApplicationModel_Preview_Holographic" = [ "ApplicationModel_Preview" ];
+          "ApplicationModel_Preview_InkWorkspace" = [ "ApplicationModel_Preview" ];
+          "ApplicationModel_Preview_Notes" = [ "ApplicationModel_Preview" ];
+          "ApplicationModel_Resources" = [ "ApplicationModel" ];
+          "ApplicationModel_Resources_Core" = [ "ApplicationModel_Resources" ];
+          "ApplicationModel_Resources_Management" = [ "ApplicationModel_Resources" ];
+          "ApplicationModel_Search" = [ "ApplicationModel" ];
+          "ApplicationModel_Search_Core" = [ "ApplicationModel_Search" ];
+          "ApplicationModel_Store" = [ "ApplicationModel" ];
+          "ApplicationModel_Store_LicenseManagement" = [ "ApplicationModel_Store" ];
+          "ApplicationModel_Store_Preview" = [ "ApplicationModel_Store" ];
+          "ApplicationModel_Store_Preview_InstallControl" = [ "ApplicationModel_Store_Preview" ];
+          "ApplicationModel_UserActivities" = [ "ApplicationModel" ];
+          "ApplicationModel_UserActivities_Core" = [ "ApplicationModel_UserActivities" ];
+          "ApplicationModel_UserDataAccounts" = [ "ApplicationModel" ];
+          "ApplicationModel_UserDataAccounts_Provider" = [ "ApplicationModel_UserDataAccounts" ];
+          "ApplicationModel_UserDataAccounts_SystemAccess" = [ "ApplicationModel_UserDataAccounts" ];
+          "ApplicationModel_UserDataTasks" = [ "ApplicationModel" ];
+          "ApplicationModel_UserDataTasks_DataProvider" = [ "ApplicationModel_UserDataTasks" ];
+          "ApplicationModel_VoiceCommands" = [ "ApplicationModel" ];
+          "ApplicationModel_Wallet" = [ "ApplicationModel" ];
+          "ApplicationModel_Wallet_System" = [ "ApplicationModel_Wallet" ];
+          "Data" = [ "Foundation" ];
+          "Data_Html" = [ "Data" ];
+          "Data_Json" = [ "Data" ];
+          "Data_Pdf" = [ "Data" ];
+          "Data_Text" = [ "Data" ];
+          "Data_Xml" = [ "Data" ];
+          "Data_Xml_Dom" = [ "Data_Xml" ];
+          "Data_Xml_Xsl" = [ "Data_Xml" ];
+          "Devices" = [ "Foundation" ];
+          "Devices_Adc" = [ "Devices" ];
+          "Devices_Adc_Provider" = [ "Devices_Adc" ];
+          "Devices_Background" = [ "Devices" ];
+          "Devices_Bluetooth" = [ "Devices" ];
+          "Devices_Bluetooth_Advertisement" = [ "Devices_Bluetooth" ];
+          "Devices_Bluetooth_Background" = [ "Devices_Bluetooth" ];
+          "Devices_Bluetooth_GenericAttributeProfile" = [ "Devices_Bluetooth" ];
+          "Devices_Bluetooth_Rfcomm" = [ "Devices_Bluetooth" ];
+          "Devices_Custom" = [ "Devices" ];
+          "Devices_Display" = [ "Devices" ];
+          "Devices_Display_Core" = [ "Devices_Display" ];
+          "Devices_Enumeration" = [ "Devices" ];
+          "Devices_Enumeration_Pnp" = [ "Devices_Enumeration" ];
+          "Devices_Geolocation" = [ "Devices" ];
+          "Devices_Geolocation_Geofencing" = [ "Devices_Geolocation" ];
+          "Devices_Geolocation_Provider" = [ "Devices_Geolocation" ];
+          "Devices_Gpio" = [ "Devices" ];
+          "Devices_Gpio_Provider" = [ "Devices_Gpio" ];
+          "Devices_Haptics" = [ "Devices" ];
+          "Devices_HumanInterfaceDevice" = [ "Devices" ];
+          "Devices_I2c" = [ "Devices" ];
+          "Devices_I2c_Provider" = [ "Devices_I2c" ];
+          "Devices_Input" = [ "Devices" ];
+          "Devices_Input_Preview" = [ "Devices_Input" ];
+          "Devices_Lights" = [ "Devices" ];
+          "Devices_Lights_Effects" = [ "Devices_Lights" ];
+          "Devices_Midi" = [ "Devices" ];
+          "Devices_PointOfService" = [ "Devices" ];
+          "Devices_PointOfService_Provider" = [ "Devices_PointOfService" ];
+          "Devices_Portable" = [ "Devices" ];
+          "Devices_Power" = [ "Devices" ];
+          "Devices_Printers" = [ "Devices" ];
+          "Devices_Printers_Extensions" = [ "Devices_Printers" ];
+          "Devices_Pwm" = [ "Devices" ];
+          "Devices_Pwm_Provider" = [ "Devices_Pwm" ];
+          "Devices_Radios" = [ "Devices" ];
+          "Devices_Scanners" = [ "Devices" ];
+          "Devices_Sensors" = [ "Devices" ];
+          "Devices_Sensors_Custom" = [ "Devices_Sensors" ];
+          "Devices_SerialCommunication" = [ "Devices" ];
+          "Devices_SmartCards" = [ "Devices" ];
+          "Devices_Sms" = [ "Devices" ];
+          "Devices_Spi" = [ "Devices" ];
+          "Devices_Spi_Provider" = [ "Devices_Spi" ];
+          "Devices_Usb" = [ "Devices" ];
+          "Devices_WiFi" = [ "Devices" ];
+          "Devices_WiFiDirect" = [ "Devices" ];
+          "Devices_WiFiDirect_Services" = [ "Devices_WiFiDirect" ];
+          "Embedded" = [ "Foundation" ];
+          "Embedded_DeviceLockdown" = [ "Embedded" ];
+          "Foundation_Collections" = [ "Foundation" ];
+          "Foundation_Diagnostics" = [ "Foundation" ];
+          "Foundation_Metadata" = [ "Foundation" ];
+          "Foundation_Numerics" = [ "Foundation" ];
+          "Gaming" = [ "Foundation" ];
+          "Gaming_Input" = [ "Gaming" ];
+          "Gaming_Input_Custom" = [ "Gaming_Input" ];
+          "Gaming_Input_ForceFeedback" = [ "Gaming_Input" ];
+          "Gaming_Input_Preview" = [ "Gaming_Input" ];
+          "Gaming_Preview" = [ "Gaming" ];
+          "Gaming_Preview_GamesEnumeration" = [ "Gaming_Preview" ];
+          "Gaming_UI" = [ "Gaming" ];
+          "Gaming_XboxLive" = [ "Gaming" ];
+          "Gaming_XboxLive_Storage" = [ "Gaming_XboxLive" ];
+          "Globalization" = [ "Foundation" ];
+          "Globalization_Collation" = [ "Globalization" ];
+          "Globalization_DateTimeFormatting" = [ "Globalization" ];
+          "Globalization_Fonts" = [ "Globalization" ];
+          "Globalization_NumberFormatting" = [ "Globalization" ];
+          "Globalization_PhoneNumberFormatting" = [ "Globalization" ];
+          "Graphics" = [ "Foundation" ];
+          "Graphics_Capture" = [ "Graphics" ];
+          "Graphics_DirectX" = [ "Graphics" ];
+          "Graphics_DirectX_Direct3D11" = [ "Graphics_DirectX" ];
+          "Graphics_Display" = [ "Graphics" ];
+          "Graphics_Display_Core" = [ "Graphics_Display" ];
+          "Graphics_Effects" = [ "Graphics" ];
+          "Graphics_Holographic" = [ "Graphics" ];
+          "Graphics_Imaging" = [ "Graphics" ];
+          "Graphics_Printing" = [ "Graphics" ];
+          "Graphics_Printing3D" = [ "Graphics" ];
+          "Graphics_Printing_OptionDetails" = [ "Graphics_Printing" ];
+          "Graphics_Printing_PrintSupport" = [ "Graphics_Printing" ];
+          "Graphics_Printing_PrintTicket" = [ "Graphics_Printing" ];
+          "Graphics_Printing_Workflow" = [ "Graphics_Printing" ];
+          "Management" = [ "Foundation" ];
+          "Management_Core" = [ "Management" ];
+          "Management_Deployment" = [ "Management" ];
+          "Management_Deployment_Preview" = [ "Management_Deployment" ];
+          "Management_Policies" = [ "Management" ];
+          "Management_Update" = [ "Management" ];
+          "Management_Workplace" = [ "Management" ];
+          "Media" = [ "Foundation" ];
+          "Media_AppBroadcasting" = [ "Media" ];
+          "Media_AppRecording" = [ "Media" ];
+          "Media_Audio" = [ "Media" ];
+          "Media_Capture" = [ "Media" ];
+          "Media_Capture_Core" = [ "Media_Capture" ];
+          "Media_Capture_Frames" = [ "Media_Capture" ];
+          "Media_Casting" = [ "Media" ];
+          "Media_ClosedCaptioning" = [ "Media" ];
+          "Media_ContentRestrictions" = [ "Media" ];
+          "Media_Control" = [ "Media" ];
+          "Media_Core" = [ "Media" ];
+          "Media_Core_Preview" = [ "Media_Core" ];
+          "Media_Devices" = [ "Media" ];
+          "Media_Devices_Core" = [ "Media_Devices" ];
+          "Media_DialProtocol" = [ "Media" ];
+          "Media_Editing" = [ "Media" ];
+          "Media_Effects" = [ "Media" ];
+          "Media_FaceAnalysis" = [ "Media" ];
+          "Media_Import" = [ "Media" ];
+          "Media_MediaProperties" = [ "Media" ];
+          "Media_Miracast" = [ "Media" ];
+          "Media_Ocr" = [ "Media" ];
+          "Media_PlayTo" = [ "Media" ];
+          "Media_Playback" = [ "Media" ];
+          "Media_Playlists" = [ "Media" ];
+          "Media_Protection" = [ "Media" ];
+          "Media_Protection_PlayReady" = [ "Media_Protection" ];
+          "Media_Render" = [ "Media" ];
+          "Media_SpeechRecognition" = [ "Media" ];
+          "Media_SpeechSynthesis" = [ "Media" ];
+          "Media_Streaming" = [ "Media" ];
+          "Media_Streaming_Adaptive" = [ "Media_Streaming" ];
+          "Media_Transcoding" = [ "Media" ];
+          "Networking" = [ "Foundation" ];
+          "Networking_BackgroundTransfer" = [ "Networking" ];
+          "Networking_Connectivity" = [ "Networking" ];
+          "Networking_NetworkOperators" = [ "Networking" ];
+          "Networking_Proximity" = [ "Networking" ];
+          "Networking_PushNotifications" = [ "Networking" ];
+          "Networking_ServiceDiscovery" = [ "Networking" ];
+          "Networking_ServiceDiscovery_Dnssd" = [ "Networking_ServiceDiscovery" ];
+          "Networking_Sockets" = [ "Networking" ];
+          "Networking_Vpn" = [ "Networking" ];
+          "Networking_XboxLive" = [ "Networking" ];
+          "Perception" = [ "Foundation" ];
+          "Perception_Automation" = [ "Perception" ];
+          "Perception_Automation_Core" = [ "Perception_Automation" ];
+          "Perception_People" = [ "Perception" ];
+          "Perception_Spatial" = [ "Perception" ];
+          "Perception_Spatial_Preview" = [ "Perception_Spatial" ];
+          "Perception_Spatial_Surfaces" = [ "Perception_Spatial" ];
+          "Phone" = [ "Foundation" ];
+          "Phone_ApplicationModel" = [ "Phone" ];
+          "Phone_Devices" = [ "Phone" ];
+          "Phone_Devices_Notification" = [ "Phone_Devices" ];
+          "Phone_Devices_Power" = [ "Phone_Devices" ];
+          "Phone_Management" = [ "Phone" ];
+          "Phone_Management_Deployment" = [ "Phone_Management" ];
+          "Phone_Media" = [ "Phone" ];
+          "Phone_Media_Devices" = [ "Phone_Media" ];
+          "Phone_Notification" = [ "Phone" ];
+          "Phone_Notification_Management" = [ "Phone_Notification" ];
+          "Phone_PersonalInformation" = [ "Phone" ];
+          "Phone_PersonalInformation_Provisioning" = [ "Phone_PersonalInformation" ];
+          "Phone_Speech" = [ "Phone" ];
+          "Phone_Speech_Recognition" = [ "Phone_Speech" ];
+          "Phone_StartScreen" = [ "Phone" ];
+          "Phone_System" = [ "Phone" ];
+          "Phone_System_Power" = [ "Phone_System" ];
+          "Phone_System_Profile" = [ "Phone_System" ];
+          "Phone_System_UserProfile" = [ "Phone_System" ];
+          "Phone_System_UserProfile_GameServices" = [ "Phone_System_UserProfile" ];
+          "Phone_System_UserProfile_GameServices_Core" = [ "Phone_System_UserProfile_GameServices" ];
+          "Phone_UI" = [ "Phone" ];
+          "Phone_UI_Input" = [ "Phone_UI" ];
+          "Security" = [ "Foundation" ];
+          "Security_Authentication" = [ "Security" ];
+          "Security_Authentication_Identity" = [ "Security_Authentication" ];
+          "Security_Authentication_Identity_Core" = [ "Security_Authentication_Identity" ];
+          "Security_Authentication_OnlineId" = [ "Security_Authentication" ];
+          "Security_Authentication_Web" = [ "Security_Authentication" ];
+          "Security_Authentication_Web_Core" = [ "Security_Authentication_Web" ];
+          "Security_Authentication_Web_Provider" = [ "Security_Authentication_Web" ];
+          "Security_Authorization" = [ "Security" ];
+          "Security_Authorization_AppCapabilityAccess" = [ "Security_Authorization" ];
+          "Security_Credentials" = [ "Security" ];
+          "Security_Credentials_UI" = [ "Security_Credentials" ];
+          "Security_Cryptography" = [ "Security" ];
+          "Security_Cryptography_Certificates" = [ "Security_Cryptography" ];
+          "Security_Cryptography_Core" = [ "Security_Cryptography" ];
+          "Security_Cryptography_DataProtection" = [ "Security_Cryptography" ];
+          "Security_DataProtection" = [ "Security" ];
+          "Security_EnterpriseData" = [ "Security" ];
+          "Security_ExchangeActiveSyncProvisioning" = [ "Security" ];
+          "Security_Isolation" = [ "Security" ];
+          "Services" = [ "Foundation" ];
+          "Services_Maps" = [ "Services" ];
+          "Services_Maps_Guidance" = [ "Services_Maps" ];
+          "Services_Maps_LocalSearch" = [ "Services_Maps" ];
+          "Services_Maps_OfflineMaps" = [ "Services_Maps" ];
+          "Services_Store" = [ "Services" ];
+          "Services_TargetedContent" = [ "Services" ];
+          "Storage" = [ "Foundation" ];
+          "Storage_AccessCache" = [ "Storage" ];
+          "Storage_BulkAccess" = [ "Storage" ];
+          "Storage_Compression" = [ "Storage" ];
+          "Storage_FileProperties" = [ "Storage" ];
+          "Storage_Pickers" = [ "Storage" ];
+          "Storage_Pickers_Provider" = [ "Storage_Pickers" ];
+          "Storage_Provider" = [ "Storage" ];
+          "Storage_Search" = [ "Storage" ];
+          "Storage_Streams" = [ "Storage" ];
+          "System" = [ "Foundation" ];
+          "System_Diagnostics" = [ "System" ];
+          "System_Diagnostics_DevicePortal" = [ "System_Diagnostics" ];
+          "System_Diagnostics_Telemetry" = [ "System_Diagnostics" ];
+          "System_Diagnostics_TraceReporting" = [ "System_Diagnostics" ];
+          "System_Display" = [ "System" ];
+          "System_Implementation" = [ "System" ];
+          "System_Implementation_FileExplorer" = [ "System_Implementation" ];
+          "System_Inventory" = [ "System" ];
+          "System_Power" = [ "System" ];
+          "System_Profile" = [ "System" ];
+          "System_Profile_SystemManufacturers" = [ "System_Profile" ];
+          "System_RemoteDesktop" = [ "System" ];
+          "System_RemoteDesktop_Input" = [ "System_RemoteDesktop" ];
+          "System_RemoteDesktop_Provider" = [ "System_RemoteDesktop" ];
+          "System_RemoteSystems" = [ "System" ];
+          "System_Threading" = [ "System" ];
+          "System_Threading_Core" = [ "System_Threading" ];
+          "System_Update" = [ "System" ];
+          "System_UserProfile" = [ "System" ];
+          "UI" = [ "Foundation" ];
+          "UI_Accessibility" = [ "UI" ];
+          "UI_ApplicationSettings" = [ "UI" ];
+          "UI_Composition" = [ "UI" ];
+          "UI_Composition_Core" = [ "UI_Composition" ];
+          "UI_Composition_Desktop" = [ "UI_Composition" ];
+          "UI_Composition_Diagnostics" = [ "UI_Composition" ];
+          "UI_Composition_Effects" = [ "UI_Composition" ];
+          "UI_Composition_Interactions" = [ "UI_Composition" ];
+          "UI_Composition_Scenes" = [ "UI_Composition" ];
+          "UI_Core" = [ "UI" ];
+          "UI_Core_AnimationMetrics" = [ "UI_Core" ];
+          "UI_Core_Preview" = [ "UI_Core" ];
+          "UI_Input" = [ "UI" ];
+          "UI_Input_Core" = [ "UI_Input" ];
+          "UI_Input_Inking" = [ "UI_Input" ];
+          "UI_Input_Inking_Analysis" = [ "UI_Input_Inking" ];
+          "UI_Input_Inking_Core" = [ "UI_Input_Inking" ];
+          "UI_Input_Inking_Preview" = [ "UI_Input_Inking" ];
+          "UI_Input_Preview" = [ "UI_Input" ];
+          "UI_Input_Preview_Injection" = [ "UI_Input_Preview" ];
+          "UI_Input_Spatial" = [ "UI_Input" ];
+          "UI_Notifications" = [ "UI" ];
+          "UI_Notifications_Management" = [ "UI_Notifications" ];
+          "UI_Notifications_Preview" = [ "UI_Notifications" ];
+          "UI_Popups" = [ "UI" ];
+          "UI_Shell" = [ "UI" ];
+          "UI_StartScreen" = [ "UI" ];
+          "UI_Text" = [ "UI" ];
+          "UI_Text_Core" = [ "UI_Text" ];
+          "UI_UIAutomation" = [ "UI" ];
+          "UI_UIAutomation_Core" = [ "UI_UIAutomation" ];
+          "UI_ViewManagement" = [ "UI" ];
+          "UI_ViewManagement_Core" = [ "UI_ViewManagement" ];
+          "UI_WebUI" = [ "UI" ];
+          "UI_WebUI_Core" = [ "UI_WebUI" ];
+          "UI_WindowManagement" = [ "UI" ];
+          "UI_WindowManagement_Preview" = [ "UI_WindowManagement" ];
+          "Wdk" = [ "Win32_Foundation" ];
+          "Wdk_Devices" = [ "Wdk" ];
+          "Wdk_Devices_HumanInterfaceDevice" = [ "Wdk_Devices" ];
+          "Wdk_Foundation" = [ "Wdk" ];
+          "Wdk_Graphics" = [ "Wdk" ];
+          "Wdk_Graphics_Direct3D" = [ "Wdk_Graphics" ];
+          "Wdk_NetworkManagement" = [ "Wdk" ];
+          "Wdk_NetworkManagement_Ndis" = [ "Wdk_NetworkManagement" ];
+          "Wdk_NetworkManagement_WindowsFilteringPlatform" = [ "Wdk_NetworkManagement" ];
+          "Wdk_Storage" = [ "Wdk" ];
+          "Wdk_Storage_FileSystem" = [ "Wdk_Storage" ];
+          "Wdk_Storage_FileSystem_Minifilters" = [ "Wdk_Storage_FileSystem" ];
+          "Wdk_System" = [ "Wdk" ];
+          "Wdk_System_IO" = [ "Wdk_System" ];
+          "Wdk_System_OfflineRegistry" = [ "Wdk_System" ];
+          "Wdk_System_Registry" = [ "Wdk_System" ];
+          "Wdk_System_SystemInformation" = [ "Wdk_System" ];
+          "Wdk_System_SystemServices" = [ "Wdk_System" ];
+          "Wdk_System_Threading" = [ "Wdk_System" ];
+          "Web" = [ "Foundation" ];
+          "Web_AtomPub" = [ "Web" ];
+          "Web_Http" = [ "Web" ];
+          "Web_Http_Diagnostics" = [ "Web_Http" ];
+          "Web_Http_Filters" = [ "Web_Http" ];
+          "Web_Http_Headers" = [ "Web_Http" ];
+          "Web_Syndication" = [ "Web" ];
+          "Web_UI" = [ "Web" ];
+          "Web_UI_Interop" = [ "Web_UI" ];
+          "Win32" = [ "Win32_Foundation" ];
+          "Win32_AI" = [ "Win32" ];
+          "Win32_AI_MachineLearning" = [ "Win32_AI" ];
+          "Win32_AI_MachineLearning_DirectML" = [ "Win32_AI_MachineLearning" ];
+          "Win32_AI_MachineLearning_WinML" = [ "Win32_AI_MachineLearning" ];
+          "Win32_Data" = [ "Win32" ];
+          "Win32_Data_HtmlHelp" = [ "Win32_Data" ];
+          "Win32_Data_RightsManagement" = [ "Win32_Data" ];
+          "Win32_Data_Xml" = [ "Win32_Data" ];
+          "Win32_Data_Xml_MsXml" = [ "Win32_Data_Xml" ];
+          "Win32_Data_Xml_XmlLite" = [ "Win32_Data_Xml" ];
+          "Win32_Devices" = [ "Win32" ];
+          "Win32_Devices_AllJoyn" = [ "Win32_Devices" ];
+          "Win32_Devices_BiometricFramework" = [ "Win32_Devices" ];
+          "Win32_Devices_Bluetooth" = [ "Win32_Devices" ];
+          "Win32_Devices_Communication" = [ "Win32_Devices" ];
+          "Win32_Devices_DeviceAccess" = [ "Win32_Devices" ];
+          "Win32_Devices_DeviceAndDriverInstallation" = [ "Win32_Devices" ];
+          "Win32_Devices_DeviceQuery" = [ "Win32_Devices" ];
+          "Win32_Devices_Display" = [ "Win32_Devices" ];
+          "Win32_Devices_Enumeration" = [ "Win32_Devices" ];
+          "Win32_Devices_Enumeration_Pnp" = [ "Win32_Devices_Enumeration" ];
+          "Win32_Devices_Fax" = [ "Win32_Devices" ];
+          "Win32_Devices_FunctionDiscovery" = [ "Win32_Devices" ];
+          "Win32_Devices_Geolocation" = [ "Win32_Devices" ];
+          "Win32_Devices_HumanInterfaceDevice" = [ "Win32_Devices" ];
+          "Win32_Devices_ImageAcquisition" = [ "Win32_Devices" ];
+          "Win32_Devices_PortableDevices" = [ "Win32_Devices" ];
+          "Win32_Devices_Properties" = [ "Win32_Devices" ];
+          "Win32_Devices_Pwm" = [ "Win32_Devices" ];
+          "Win32_Devices_Sensors" = [ "Win32_Devices" ];
+          "Win32_Devices_SerialCommunication" = [ "Win32_Devices" ];
+          "Win32_Devices_Tapi" = [ "Win32_Devices" ];
+          "Win32_Devices_Usb" = [ "Win32_Devices" ];
+          "Win32_Devices_WebServicesOnDevices" = [ "Win32_Devices" ];
+          "Win32_Foundation" = [ "Win32" ];
+          "Win32_Gaming" = [ "Win32" ];
+          "Win32_Globalization" = [ "Win32" ];
+          "Win32_Graphics" = [ "Win32" ];
+          "Win32_Graphics_CompositionSwapchain" = [ "Win32_Graphics" ];
+          "Win32_Graphics_DXCore" = [ "Win32_Graphics" ];
+          "Win32_Graphics_Direct2D" = [ "Win32_Graphics" ];
+          "Win32_Graphics_Direct2D_Common" = [ "Win32_Graphics_Direct2D" ];
+          "Win32_Graphics_Direct3D" = [ "Win32_Graphics" ];
+          "Win32_Graphics_Direct3D10" = [ "Win32_Graphics" ];
+          "Win32_Graphics_Direct3D11" = [ "Win32_Graphics" ];
+          "Win32_Graphics_Direct3D11on12" = [ "Win32_Graphics" ];
+          "Win32_Graphics_Direct3D12" = [ "Win32_Graphics" ];
+          "Win32_Graphics_Direct3D9" = [ "Win32_Graphics" ];
+          "Win32_Graphics_Direct3D9on12" = [ "Win32_Graphics" ];
+          "Win32_Graphics_Direct3D_Dxc" = [ "Win32_Graphics_Direct3D" ];
+          "Win32_Graphics_Direct3D_Fxc" = [ "Win32_Graphics_Direct3D" ];
+          "Win32_Graphics_DirectComposition" = [ "Win32_Graphics" ];
+          "Win32_Graphics_DirectDraw" = [ "Win32_Graphics" ];
+          "Win32_Graphics_DirectManipulation" = [ "Win32_Graphics" ];
+          "Win32_Graphics_DirectWrite" = [ "Win32_Graphics" ];
+          "Win32_Graphics_Dwm" = [ "Win32_Graphics" ];
+          "Win32_Graphics_Dxgi" = [ "Win32_Graphics" ];
+          "Win32_Graphics_Dxgi_Common" = [ "Win32_Graphics_Dxgi" ];
+          "Win32_Graphics_Gdi" = [ "Win32_Graphics" ];
+          "Win32_Graphics_GdiPlus" = [ "Win32_Graphics" ];
+          "Win32_Graphics_Hlsl" = [ "Win32_Graphics" ];
+          "Win32_Graphics_Imaging" = [ "Win32_Graphics" ];
+          "Win32_Graphics_Imaging_D2D" = [ "Win32_Graphics_Imaging" ];
+          "Win32_Graphics_OpenGL" = [ "Win32_Graphics" ];
+          "Win32_Graphics_Printing" = [ "Win32_Graphics" ];
+          "Win32_Graphics_Printing_PrintTicket" = [ "Win32_Graphics_Printing" ];
+          "Win32_Management" = [ "Win32" ];
+          "Win32_Management_MobileDeviceManagementRegistration" = [ "Win32_Management" ];
+          "Win32_Media" = [ "Win32" ];
+          "Win32_Media_Audio" = [ "Win32_Media" ];
+          "Win32_Media_Audio_Apo" = [ "Win32_Media_Audio" ];
+          "Win32_Media_Audio_DirectMusic" = [ "Win32_Media_Audio" ];
+          "Win32_Media_Audio_DirectSound" = [ "Win32_Media_Audio" ];
+          "Win32_Media_Audio_Endpoints" = [ "Win32_Media_Audio" ];
+          "Win32_Media_Audio_XAudio2" = [ "Win32_Media_Audio" ];
+          "Win32_Media_DeviceManager" = [ "Win32_Media" ];
+          "Win32_Media_DirectShow" = [ "Win32_Media" ];
+          "Win32_Media_DirectShow_Tv" = [ "Win32_Media_DirectShow" ];
+          "Win32_Media_DirectShow_Xml" = [ "Win32_Media_DirectShow" ];
+          "Win32_Media_DxMediaObjects" = [ "Win32_Media" ];
+          "Win32_Media_KernelStreaming" = [ "Win32_Media" ];
+          "Win32_Media_LibrarySharingServices" = [ "Win32_Media" ];
+          "Win32_Media_MediaFoundation" = [ "Win32_Media" ];
+          "Win32_Media_MediaPlayer" = [ "Win32_Media" ];
+          "Win32_Media_Multimedia" = [ "Win32_Media" ];
+          "Win32_Media_PictureAcquisition" = [ "Win32_Media" ];
+          "Win32_Media_Speech" = [ "Win32_Media" ];
+          "Win32_Media_Streaming" = [ "Win32_Media" ];
+          "Win32_Media_WindowsMediaFormat" = [ "Win32_Media" ];
+          "Win32_NetworkManagement" = [ "Win32" ];
+          "Win32_NetworkManagement_Dhcp" = [ "Win32_NetworkManagement" ];
+          "Win32_NetworkManagement_Dns" = [ "Win32_NetworkManagement" ];
+          "Win32_NetworkManagement_InternetConnectionWizard" = [ "Win32_NetworkManagement" ];
+          "Win32_NetworkManagement_IpHelper" = [ "Win32_NetworkManagement" ];
+          "Win32_NetworkManagement_MobileBroadband" = [ "Win32_NetworkManagement" ];
+          "Win32_NetworkManagement_Multicast" = [ "Win32_NetworkManagement" ];
+          "Win32_NetworkManagement_Ndis" = [ "Win32_NetworkManagement" ];
+          "Win32_NetworkManagement_NetBios" = [ "Win32_NetworkManagement" ];
+          "Win32_NetworkManagement_NetManagement" = [ "Win32_NetworkManagement" ];
+          "Win32_NetworkManagement_NetShell" = [ "Win32_NetworkManagement" ];
+          "Win32_NetworkManagement_NetworkDiagnosticsFramework" = [ "Win32_NetworkManagement" ];
+          "Win32_NetworkManagement_NetworkPolicyServer" = [ "Win32_NetworkManagement" ];
+          "Win32_NetworkManagement_P2P" = [ "Win32_NetworkManagement" ];
+          "Win32_NetworkManagement_QoS" = [ "Win32_NetworkManagement" ];
+          "Win32_NetworkManagement_Rras" = [ "Win32_NetworkManagement" ];
+          "Win32_NetworkManagement_Snmp" = [ "Win32_NetworkManagement" ];
+          "Win32_NetworkManagement_WNet" = [ "Win32_NetworkManagement" ];
+          "Win32_NetworkManagement_WebDav" = [ "Win32_NetworkManagement" ];
+          "Win32_NetworkManagement_WiFi" = [ "Win32_NetworkManagement" ];
+          "Win32_NetworkManagement_WindowsConnectNow" = [ "Win32_NetworkManagement" ];
+          "Win32_NetworkManagement_WindowsConnectionManager" = [ "Win32_NetworkManagement" ];
+          "Win32_NetworkManagement_WindowsFilteringPlatform" = [ "Win32_NetworkManagement" ];
+          "Win32_NetworkManagement_WindowsFirewall" = [ "Win32_NetworkManagement" ];
+          "Win32_NetworkManagement_WindowsNetworkVirtualization" = [ "Win32_NetworkManagement" ];
+          "Win32_Networking" = [ "Win32" ];
+          "Win32_Networking_ActiveDirectory" = [ "Win32_Networking" ];
+          "Win32_Networking_BackgroundIntelligentTransferService" = [ "Win32_Networking" ];
+          "Win32_Networking_Clustering" = [ "Win32_Networking" ];
+          "Win32_Networking_HttpServer" = [ "Win32_Networking" ];
+          "Win32_Networking_Ldap" = [ "Win32_Networking" ];
+          "Win32_Networking_NetworkListManager" = [ "Win32_Networking" ];
+          "Win32_Networking_RemoteDifferentialCompression" = [ "Win32_Networking" ];
+          "Win32_Networking_WebSocket" = [ "Win32_Networking" ];
+          "Win32_Networking_WinHttp" = [ "Win32_Networking" ];
+          "Win32_Networking_WinInet" = [ "Win32_Networking" ];
+          "Win32_Networking_WinSock" = [ "Win32_Networking" ];
+          "Win32_Networking_WindowsWebServices" = [ "Win32_Networking" ];
+          "Win32_Security" = [ "Win32" ];
+          "Win32_Security_AppLocker" = [ "Win32_Security" ];
+          "Win32_Security_Authentication" = [ "Win32_Security" ];
+          "Win32_Security_Authentication_Identity" = [ "Win32_Security_Authentication" ];
+          "Win32_Security_Authentication_Identity_Provider" = [ "Win32_Security_Authentication_Identity" ];
+          "Win32_Security_Authorization" = [ "Win32_Security" ];
+          "Win32_Security_Authorization_UI" = [ "Win32_Security_Authorization" ];
+          "Win32_Security_ConfigurationSnapin" = [ "Win32_Security" ];
+          "Win32_Security_Credentials" = [ "Win32_Security" ];
+          "Win32_Security_Cryptography" = [ "Win32_Security" ];
+          "Win32_Security_Cryptography_Catalog" = [ "Win32_Security_Cryptography" ];
+          "Win32_Security_Cryptography_Certificates" = [ "Win32_Security_Cryptography" ];
+          "Win32_Security_Cryptography_Sip" = [ "Win32_Security_Cryptography" ];
+          "Win32_Security_Cryptography_UI" = [ "Win32_Security_Cryptography" ];
+          "Win32_Security_DiagnosticDataQuery" = [ "Win32_Security" ];
+          "Win32_Security_DirectoryServices" = [ "Win32_Security" ];
+          "Win32_Security_EnterpriseData" = [ "Win32_Security" ];
+          "Win32_Security_ExtensibleAuthenticationProtocol" = [ "Win32_Security" ];
+          "Win32_Security_Isolation" = [ "Win32_Security" ];
+          "Win32_Security_LicenseProtection" = [ "Win32_Security" ];
+          "Win32_Security_NetworkAccessProtection" = [ "Win32_Security" ];
+          "Win32_Security_Tpm" = [ "Win32_Security" ];
+          "Win32_Security_WinTrust" = [ "Win32_Security" ];
+          "Win32_Security_WinWlx" = [ "Win32_Security" ];
+          "Win32_Storage" = [ "Win32" ];
+          "Win32_Storage_Cabinets" = [ "Win32_Storage" ];
+          "Win32_Storage_CloudFilters" = [ "Win32_Storage" ];
+          "Win32_Storage_Compression" = [ "Win32_Storage" ];
+          "Win32_Storage_DataDeduplication" = [ "Win32_Storage" ];
+          "Win32_Storage_DistributedFileSystem" = [ "Win32_Storage" ];
+          "Win32_Storage_EnhancedStorage" = [ "Win32_Storage" ];
+          "Win32_Storage_FileHistory" = [ "Win32_Storage" ];
+          "Win32_Storage_FileServerResourceManager" = [ "Win32_Storage" ];
+          "Win32_Storage_FileSystem" = [ "Win32_Storage" ];
+          "Win32_Storage_Imapi" = [ "Win32_Storage" ];
+          "Win32_Storage_IndexServer" = [ "Win32_Storage" ];
+          "Win32_Storage_InstallableFileSystems" = [ "Win32_Storage" ];
+          "Win32_Storage_IscsiDisc" = [ "Win32_Storage" ];
+          "Win32_Storage_Jet" = [ "Win32_Storage" ];
+          "Win32_Storage_Nvme" = [ "Win32_Storage" ];
+          "Win32_Storage_OfflineFiles" = [ "Win32_Storage" ];
+          "Win32_Storage_OperationRecorder" = [ "Win32_Storage" ];
+          "Win32_Storage_Packaging" = [ "Win32_Storage" ];
+          "Win32_Storage_Packaging_Appx" = [ "Win32_Storage_Packaging" ];
+          "Win32_Storage_Packaging_Opc" = [ "Win32_Storage_Packaging" ];
+          "Win32_Storage_ProjectedFileSystem" = [ "Win32_Storage" ];
+          "Win32_Storage_StructuredStorage" = [ "Win32_Storage" ];
+          "Win32_Storage_Vhd" = [ "Win32_Storage" ];
+          "Win32_Storage_VirtualDiskService" = [ "Win32_Storage" ];
+          "Win32_Storage_Vss" = [ "Win32_Storage" ];
+          "Win32_Storage_Xps" = [ "Win32_Storage" ];
+          "Win32_Storage_Xps_Printing" = [ "Win32_Storage_Xps" ];
+          "Win32_System" = [ "Win32" ];
+          "Win32_System_AddressBook" = [ "Win32_System" ];
+          "Win32_System_Antimalware" = [ "Win32_System" ];
+          "Win32_System_ApplicationInstallationAndServicing" = [ "Win32_System" ];
+          "Win32_System_ApplicationVerifier" = [ "Win32_System" ];
+          "Win32_System_AssessmentTool" = [ "Win32_System" ];
+          "Win32_System_ClrHosting" = [ "Win32_System" ];
+          "Win32_System_Com" = [ "Win32_System" ];
+          "Win32_System_Com_CallObj" = [ "Win32_System_Com" ];
+          "Win32_System_Com_ChannelCredentials" = [ "Win32_System_Com" ];
+          "Win32_System_Com_Events" = [ "Win32_System_Com" ];
+          "Win32_System_Com_Marshal" = [ "Win32_System_Com" ];
+          "Win32_System_Com_StructuredStorage" = [ "Win32_System_Com" ];
+          "Win32_System_Com_UI" = [ "Win32_System_Com" ];
+          "Win32_System_Com_Urlmon" = [ "Win32_System_Com" ];
+          "Win32_System_ComponentServices" = [ "Win32_System" ];
+          "Win32_System_Console" = [ "Win32_System" ];
+          "Win32_System_Contacts" = [ "Win32_System" ];
+          "Win32_System_CorrelationVector" = [ "Win32_System" ];
+          "Win32_System_DataExchange" = [ "Win32_System" ];
+          "Win32_System_DeploymentServices" = [ "Win32_System" ];
+          "Win32_System_DesktopSharing" = [ "Win32_System" ];
+          "Win32_System_DeveloperLicensing" = [ "Win32_System" ];
+          "Win32_System_Diagnostics" = [ "Win32_System" ];
+          "Win32_System_Diagnostics_Ceip" = [ "Win32_System_Diagnostics" ];
+          "Win32_System_Diagnostics_ClrProfiling" = [ "Win32_System_Diagnostics" ];
+          "Win32_System_Diagnostics_Debug" = [ "Win32_System_Diagnostics" ];
+          "Win32_System_Diagnostics_Debug_ActiveScript" = [ "Win32_System_Diagnostics_Debug" ];
+          "Win32_System_Diagnostics_Debug_Extensions" = [ "Win32_System_Diagnostics_Debug" ];
+          "Win32_System_Diagnostics_Etw" = [ "Win32_System_Diagnostics" ];
+          "Win32_System_Diagnostics_ProcessSnapshotting" = [ "Win32_System_Diagnostics" ];
+          "Win32_System_Diagnostics_ToolHelp" = [ "Win32_System_Diagnostics" ];
+          "Win32_System_Diagnostics_TraceLogging" = [ "Win32_System_Diagnostics" ];
+          "Win32_System_DistributedTransactionCoordinator" = [ "Win32_System" ];
+          "Win32_System_Environment" = [ "Win32_System" ];
+          "Win32_System_ErrorReporting" = [ "Win32_System" ];
+          "Win32_System_EventCollector" = [ "Win32_System" ];
+          "Win32_System_EventLog" = [ "Win32_System" ];
+          "Win32_System_EventNotificationService" = [ "Win32_System" ];
+          "Win32_System_GroupPolicy" = [ "Win32_System" ];
+          "Win32_System_HostCompute" = [ "Win32_System" ];
+          "Win32_System_HostComputeNetwork" = [ "Win32_System" ];
+          "Win32_System_HostComputeSystem" = [ "Win32_System" ];
+          "Win32_System_Hypervisor" = [ "Win32_System" ];
+          "Win32_System_IO" = [ "Win32_System" ];
+          "Win32_System_Iis" = [ "Win32_System" ];
+          "Win32_System_Ioctl" = [ "Win32_System" ];
+          "Win32_System_JobObjects" = [ "Win32_System" ];
+          "Win32_System_Js" = [ "Win32_System" ];
+          "Win32_System_Kernel" = [ "Win32_System" ];
+          "Win32_System_LibraryLoader" = [ "Win32_System" ];
+          "Win32_System_Mailslots" = [ "Win32_System" ];
+          "Win32_System_Mapi" = [ "Win32_System" ];
+          "Win32_System_Memory" = [ "Win32_System" ];
+          "Win32_System_Memory_NonVolatile" = [ "Win32_System_Memory" ];
+          "Win32_System_MessageQueuing" = [ "Win32_System" ];
+          "Win32_System_MixedReality" = [ "Win32_System" ];
+          "Win32_System_Mmc" = [ "Win32_System" ];
+          "Win32_System_Ole" = [ "Win32_System" ];
+          "Win32_System_ParentalControls" = [ "Win32_System" ];
+          "Win32_System_PasswordManagement" = [ "Win32_System" ];
+          "Win32_System_Performance" = [ "Win32_System" ];
+          "Win32_System_Performance_HardwareCounterProfiling" = [ "Win32_System_Performance" ];
+          "Win32_System_Pipes" = [ "Win32_System" ];
+          "Win32_System_Power" = [ "Win32_System" ];
+          "Win32_System_ProcessStatus" = [ "Win32_System" ];
+          "Win32_System_RealTimeCommunications" = [ "Win32_System" ];
+          "Win32_System_Recovery" = [ "Win32_System" ];
+          "Win32_System_Registry" = [ "Win32_System" ];
+          "Win32_System_RemoteAssistance" = [ "Win32_System" ];
+          "Win32_System_RemoteDesktop" = [ "Win32_System" ];
+          "Win32_System_RemoteManagement" = [ "Win32_System" ];
+          "Win32_System_RestartManager" = [ "Win32_System" ];
+          "Win32_System_Restore" = [ "Win32_System" ];
+          "Win32_System_Rpc" = [ "Win32_System" ];
+          "Win32_System_Search" = [ "Win32_System" ];
+          "Win32_System_Search_Common" = [ "Win32_System_Search" ];
+          "Win32_System_SecurityCenter" = [ "Win32_System" ];
+          "Win32_System_ServerBackup" = [ "Win32_System" ];
+          "Win32_System_Services" = [ "Win32_System" ];
+          "Win32_System_SettingsManagementInfrastructure" = [ "Win32_System" ];
+          "Win32_System_SetupAndMigration" = [ "Win32_System" ];
+          "Win32_System_Shutdown" = [ "Win32_System" ];
+          "Win32_System_SideShow" = [ "Win32_System" ];
+          "Win32_System_StationsAndDesktops" = [ "Win32_System" ];
+          "Win32_System_SubsystemForLinux" = [ "Win32_System" ];
+          "Win32_System_SystemInformation" = [ "Win32_System" ];
+          "Win32_System_SystemServices" = [ "Win32_System" ];
+          "Win32_System_TaskScheduler" = [ "Win32_System" ];
+          "Win32_System_Threading" = [ "Win32_System" ];
+          "Win32_System_Time" = [ "Win32_System" ];
+          "Win32_System_TpmBaseServices" = [ "Win32_System" ];
+          "Win32_System_TransactionServer" = [ "Win32_System" ];
+          "Win32_System_UpdateAgent" = [ "Win32_System" ];
+          "Win32_System_UpdateAssessment" = [ "Win32_System" ];
+          "Win32_System_UserAccessLogging" = [ "Win32_System" ];
+          "Win32_System_Variant" = [ "Win32_System" ];
+          "Win32_System_VirtualDosMachines" = [ "Win32_System" ];
+          "Win32_System_WinRT" = [ "Win32_System" ];
+          "Win32_System_WinRT_AllJoyn" = [ "Win32_System_WinRT" ];
+          "Win32_System_WinRT_Composition" = [ "Win32_System_WinRT" ];
+          "Win32_System_WinRT_CoreInputView" = [ "Win32_System_WinRT" ];
+          "Win32_System_WinRT_Direct3D11" = [ "Win32_System_WinRT" ];
+          "Win32_System_WinRT_Display" = [ "Win32_System_WinRT" ];
+          "Win32_System_WinRT_Graphics" = [ "Win32_System_WinRT" ];
+          "Win32_System_WinRT_Graphics_Capture" = [ "Win32_System_WinRT_Graphics" ];
+          "Win32_System_WinRT_Graphics_Direct2D" = [ "Win32_System_WinRT_Graphics" ];
+          "Win32_System_WinRT_Graphics_Imaging" = [ "Win32_System_WinRT_Graphics" ];
+          "Win32_System_WinRT_Holographic" = [ "Win32_System_WinRT" ];
+          "Win32_System_WinRT_Isolation" = [ "Win32_System_WinRT" ];
+          "Win32_System_WinRT_ML" = [ "Win32_System_WinRT" ];
+          "Win32_System_WinRT_Media" = [ "Win32_System_WinRT" ];
+          "Win32_System_WinRT_Metadata" = [ "Win32_System_WinRT" ];
+          "Win32_System_WinRT_Pdf" = [ "Win32_System_WinRT" ];
+          "Win32_System_WinRT_Printing" = [ "Win32_System_WinRT" ];
+          "Win32_System_WinRT_Shell" = [ "Win32_System_WinRT" ];
+          "Win32_System_WinRT_Storage" = [ "Win32_System_WinRT" ];
+          "Win32_System_WindowsProgramming" = [ "Win32_System" ];
+          "Win32_System_WindowsSync" = [ "Win32_System" ];
+          "Win32_System_Wmi" = [ "Win32_System" ];
+          "Win32_UI" = [ "Win32" ];
+          "Win32_UI_Accessibility" = [ "Win32_UI" ];
+          "Win32_UI_Animation" = [ "Win32_UI" ];
+          "Win32_UI_ColorSystem" = [ "Win32_UI" ];
+          "Win32_UI_Controls" = [ "Win32_UI" ];
+          "Win32_UI_Controls_Dialogs" = [ "Win32_UI_Controls" ];
+          "Win32_UI_Controls_RichEdit" = [ "Win32_UI_Controls" ];
+          "Win32_UI_HiDpi" = [ "Win32_UI" ];
+          "Win32_UI_Input" = [ "Win32_UI" ];
+          "Win32_UI_Input_Ime" = [ "Win32_UI_Input" ];
+          "Win32_UI_Input_Ink" = [ "Win32_UI_Input" ];
+          "Win32_UI_Input_KeyboardAndMouse" = [ "Win32_UI_Input" ];
+          "Win32_UI_Input_Pointer" = [ "Win32_UI_Input" ];
+          "Win32_UI_Input_Radial" = [ "Win32_UI_Input" ];
+          "Win32_UI_Input_Touch" = [ "Win32_UI_Input" ];
+          "Win32_UI_Input_XboxController" = [ "Win32_UI_Input" ];
+          "Win32_UI_InteractionContext" = [ "Win32_UI" ];
+          "Win32_UI_LegacyWindowsEnvironmentFeatures" = [ "Win32_UI" ];
+          "Win32_UI_Magnification" = [ "Win32_UI" ];
+          "Win32_UI_Notifications" = [ "Win32_UI" ];
+          "Win32_UI_Ribbon" = [ "Win32_UI" ];
+          "Win32_UI_Shell" = [ "Win32_UI" ];
+          "Win32_UI_Shell_Common" = [ "Win32_UI_Shell" ];
+          "Win32_UI_Shell_PropertiesSystem" = [ "Win32_UI_Shell" ];
+          "Win32_UI_TabletPC" = [ "Win32_UI" ];
+          "Win32_UI_TextServices" = [ "Win32_UI" ];
+          "Win32_UI_WindowsAndMessaging" = [ "Win32_UI" ];
+          "Win32_UI_Wpf" = [ "Win32_UI" ];
+          "Win32_Web" = [ "Win32" ];
+          "Win32_Web_InternetExplorer" = [ "Win32_Web" ];
+          "implement" = [ "windows-implement" "windows-interface" "windows-core/implement" ];
+          "windows-implement" = [ "dep:windows-implement" ];
+          "windows-interface" = [ "dep:windows-interface" ];
+        };
+        resolvedDefaultFeatures = [ "Win32" "Win32_Foundation" "Win32_System" "Win32_System_Diagnostics" "Win32_System_Diagnostics_Debug" "Win32_System_Kernel" "Win32_System_Memory" "Win32_System_SystemInformation" "default" ];
+      };
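      # In entries like the `windows` crate above, the `features` attrset encodes
      # Cargo's feature-implication graph (each key enables the features it maps
      # to), and `resolvedDefaultFeatures` lists the transitively resolved set the
      # build actually enables. A cycle-safe closure over a small excerpt of that
      # graph (illustration only, not how crate2nix computes it):
      #
      #   let
      #     featureMap = {
      #       "Win32" = [ "Win32_Foundation" ];
      #       "Win32_Foundation" = [ "Win32" ];
      #       "Win32_System" = [ "Win32" ];
      #       "Win32_System_Memory" = [ "Win32_System" ];
      #     };
      #     # builtins.genericClosure deduplicates by `key`, so the
      #     # Win32 <-> Win32_Foundation cycle terminates.
      #     closure = roots: map (x: x.key) (builtins.genericClosure {
      #       startSet = map (f: { key = f; }) roots;
      #       operator = item: map (f: { key = f; })
      #         (featureMap.${item.key} or [ ]);
      #     });
      #   in
      #     closure [ "Win32_System_Memory" ]
      #     # => [ "Win32_System_Memory" "Win32_System" "Win32" "Win32_Foundation" ]
      #     #    (all four appear in resolvedDefaultFeatures above)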
+      "windows-core 0.52.0" = rec {
         crateName = "windows-core";
         version = "0.52.0";
         edition = "2021";
         sha256 = "1nc3qv7sy24x0nlnb32f7alzpd6f72l4p24vl65vydbyil669ark";
+        libName = "windows_core";
         authors = [
           "Microsoft"
         ];
         dependencies = [
           {
             name = "windows-targets";
-            packageId = "windows-targets 0.52.0";
+            packageId = "windows-targets 0.52.6";
           }
         ];
         features = { };
         resolvedDefaultFeatures = [ "default" ];
       };
+      "windows-core 0.54.0" = rec {
+        crateName = "windows-core";
+        version = "0.54.0";
+        edition = "2021";
+        sha256 = "0r8x2sgl4qq1h23ldf4z7cj213k0bz7479m8a156h79mi6f1nrhj";
+        libName = "windows_core";
+        authors = [
+          "Microsoft"
+        ];
+        dependencies = [
+          {
+            name = "windows-result";
+            packageId = "windows-result 0.1.2";
+          }
+          {
+            name = "windows-targets";
+            packageId = "windows-targets 0.52.6";
+          }
+        ];
+        features = { };
+        resolvedDefaultFeatures = [ "default" ];
+      };
+      "windows-registry" = rec {
+        crateName = "windows-registry";
+        version = "0.2.0";
+        edition = "2021";
+        sha256 = "1c04923fq0rbvl3z0h67xr6rh2fgwkizhclhqv0j79i0nwdh0074";
+        libName = "windows_registry";
+        authors = [
+          "Microsoft"
+        ];
+        dependencies = [
+          {
+            name = "windows-result";
+            packageId = "windows-result 0.2.0";
+          }
+          {
+            name = "windows-strings";
+            packageId = "windows-strings";
+          }
+          {
+            name = "windows-targets";
+            packageId = "windows-targets 0.52.6";
+          }
+        ];
+
+      };
+      "windows-result 0.1.2" = rec {
+        crateName = "windows-result";
+        version = "0.1.2";
+        edition = "2021";
+        sha256 = "1y274q1v0vy21lhkgslpxpq1m08hvr1mcs2l88h1b1gcx0136f2y";
+        libName = "windows_result";
+        authors = [
+          "Microsoft"
+        ];
+        dependencies = [
+          {
+            name = "windows-targets";
+            packageId = "windows-targets 0.52.6";
+          }
+        ];
+        features = {
+          "default" = [ "std" ];
+        };
+        resolvedDefaultFeatures = [ "default" "std" ];
+      };
+      "windows-result 0.2.0" = rec {
+        crateName = "windows-result";
+        version = "0.2.0";
+        edition = "2021";
+        sha256 = "03mf2z1xcy2slhhsm15z24p76qxgm2m74xdjp8bihyag47c4640x";
+        libName = "windows_result";
+        authors = [
+          "Microsoft"
+        ];
+        dependencies = [
+          {
+            name = "windows-targets";
+            packageId = "windows-targets 0.52.6";
+          }
+        ];
+        features = {
+          "default" = [ "std" ];
+        };
+        resolvedDefaultFeatures = [ "default" "std" ];
+      };
+      "windows-strings" = rec {
+        crateName = "windows-strings";
+        version = "0.1.0";
+        edition = "2021";
+        sha256 = "042dxvi3133f7dyi2pgcvknwkikk47k8bddwxbq5s0l6qhjv3nac";
+        libName = "windows_strings";
+        authors = [
+          "Microsoft"
+        ];
+        dependencies = [
+          {
+            name = "windows-result";
+            packageId = "windows-result 0.2.0";
+            usesDefaultFeatures = false;
+          }
+          {
+            name = "windows-targets";
+            packageId = "windows-targets 0.52.6";
+          }
+        ];
+        features = {
+          "default" = [ "std" ];
+        };
+        resolvedDefaultFeatures = [ "default" "std" ];
+      };
       "windows-sys 0.48.0" = rec {
         crateName = "windows-sys";
         version = "0.48.0";
         edition = "2018";
         sha256 = "1aan23v5gs7gya1lc46hqn9mdh8yph3fhxmhxlw36pn6pqc28zb7";
+        libName = "windows_sys";
         authors = [
           "Microsoft"
         ];
@@ -15477,36 +18488,294 @@ rec {
           "Win32_Web" = [ "Win32" ];
           "Win32_Web_InternetExplorer" = [ "Win32_Web" ];
         };
-        resolvedDefaultFeatures = [ "Win32" "Win32_Foundation" "Win32_Networking" "Win32_Networking_WinSock" "Win32_Security" "Win32_Storage" "Win32_Storage_FileSystem" "Win32_System" "Win32_System_Console" "Win32_System_Diagnostics" "Win32_System_Diagnostics_Debug" "Win32_System_IO" "Win32_System_Pipes" "Win32_System_Registry" "Win32_System_SystemServices" "Win32_System_Threading" "Win32_System_Time" "Win32_System_WindowsProgramming" "default" ];
+        resolvedDefaultFeatures = [ "Win32" "Win32_Foundation" "Win32_Networking" "Win32_Networking_WinSock" "Win32_Security" "Win32_Storage" "Win32_Storage_FileSystem" "Win32_System" "Win32_System_Console" "Win32_System_IO" "Win32_System_Pipes" "Win32_System_Threading" "Win32_System_WindowsProgramming" "default" ];
       };
       "windows-sys 0.52.0" = rec {
         crateName = "windows-sys";
         version = "0.52.0";
         edition = "2021";
         sha256 = "0gd3v4ji88490zgb6b5mq5zgbvwv7zx1ibn8v3x83rwcdbryaar8";
+        libName = "windows_sys";
+        authors = [
+          "Microsoft"
+        ];
+        dependencies = [
+          {
+            name = "windows-targets";
+            packageId = "windows-targets 0.52.6";
+          }
+        ];
+        features = {
+          "Wdk_Foundation" = [ "Wdk" ];
+          "Wdk_Graphics" = [ "Wdk" ];
+          "Wdk_Graphics_Direct3D" = [ "Wdk_Graphics" ];
+          "Wdk_Storage" = [ "Wdk" ];
+          "Wdk_Storage_FileSystem" = [ "Wdk_Storage" ];
+          "Wdk_Storage_FileSystem_Minifilters" = [ "Wdk_Storage_FileSystem" ];
+          "Wdk_System" = [ "Wdk" ];
+          "Wdk_System_IO" = [ "Wdk_System" ];
+          "Wdk_System_OfflineRegistry" = [ "Wdk_System" ];
+          "Wdk_System_Registry" = [ "Wdk_System" ];
+          "Wdk_System_SystemInformation" = [ "Wdk_System" ];
+          "Wdk_System_SystemServices" = [ "Wdk_System" ];
+          "Wdk_System_Threading" = [ "Wdk_System" ];
+          "Win32_Data" = [ "Win32" ];
+          "Win32_Data_HtmlHelp" = [ "Win32_Data" ];
+          "Win32_Data_RightsManagement" = [ "Win32_Data" ];
+          "Win32_Devices" = [ "Win32" ];
+          "Win32_Devices_AllJoyn" = [ "Win32_Devices" ];
+          "Win32_Devices_BiometricFramework" = [ "Win32_Devices" ];
+          "Win32_Devices_Bluetooth" = [ "Win32_Devices" ];
+          "Win32_Devices_Communication" = [ "Win32_Devices" ];
+          "Win32_Devices_DeviceAndDriverInstallation" = [ "Win32_Devices" ];
+          "Win32_Devices_DeviceQuery" = [ "Win32_Devices" ];
+          "Win32_Devices_Display" = [ "Win32_Devices" ];
+          "Win32_Devices_Enumeration" = [ "Win32_Devices" ];
+          "Win32_Devices_Enumeration_Pnp" = [ "Win32_Devices_Enumeration" ];
+          "Win32_Devices_Fax" = [ "Win32_Devices" ];
+          "Win32_Devices_HumanInterfaceDevice" = [ "Win32_Devices" ];
+          "Win32_Devices_PortableDevices" = [ "Win32_Devices" ];
+          "Win32_Devices_Properties" = [ "Win32_Devices" ];
+          "Win32_Devices_Pwm" = [ "Win32_Devices" ];
+          "Win32_Devices_Sensors" = [ "Win32_Devices" ];
+          "Win32_Devices_SerialCommunication" = [ "Win32_Devices" ];
+          "Win32_Devices_Tapi" = [ "Win32_Devices" ];
+          "Win32_Devices_Usb" = [ "Win32_Devices" ];
+          "Win32_Devices_WebServicesOnDevices" = [ "Win32_Devices" ];
+          "Win32_Foundation" = [ "Win32" ];
+          "Win32_Gaming" = [ "Win32" ];
+          "Win32_Globalization" = [ "Win32" ];
+          "Win32_Graphics" = [ "Win32" ];
+          "Win32_Graphics_Dwm" = [ "Win32_Graphics" ];
+          "Win32_Graphics_Gdi" = [ "Win32_Graphics" ];
+          "Win32_Graphics_GdiPlus" = [ "Win32_Graphics" ];
+          "Win32_Graphics_Hlsl" = [ "Win32_Graphics" ];
+          "Win32_Graphics_OpenGL" = [ "Win32_Graphics" ];
+          "Win32_Graphics_Printing" = [ "Win32_Graphics" ];
+          "Win32_Graphics_Printing_PrintTicket" = [ "Win32_Graphics_Printing" ];
+          "Win32_Management" = [ "Win32" ];
+          "Win32_Management_MobileDeviceManagementRegistration" = [ "Win32_Management" ];
+          "Win32_Media" = [ "Win32" ];
+          "Win32_Media_Audio" = [ "Win32_Media" ];
+          "Win32_Media_DxMediaObjects" = [ "Win32_Media" ];
+          "Win32_Media_KernelStreaming" = [ "Win32_Media" ];
+          "Win32_Media_Multimedia" = [ "Win32_Media" ];
+          "Win32_Media_Streaming" = [ "Win32_Media" ];
+          "Win32_Media_WindowsMediaFormat" = [ "Win32_Media" ];
+          "Win32_NetworkManagement" = [ "Win32" ];
+          "Win32_NetworkManagement_Dhcp" = [ "Win32_NetworkManagement" ];
+          "Win32_NetworkManagement_Dns" = [ "Win32_NetworkManagement" ];
+          "Win32_NetworkManagement_InternetConnectionWizard" = [ "Win32_NetworkManagement" ];
+          "Win32_NetworkManagement_IpHelper" = [ "Win32_NetworkManagement" ];
+          "Win32_NetworkManagement_Multicast" = [ "Win32_NetworkManagement" ];
+          "Win32_NetworkManagement_Ndis" = [ "Win32_NetworkManagement" ];
+          "Win32_NetworkManagement_NetBios" = [ "Win32_NetworkManagement" ];
+          "Win32_NetworkManagement_NetManagement" = [ "Win32_NetworkManagement" ];
+          "Win32_NetworkManagement_NetShell" = [ "Win32_NetworkManagement" ];
+          "Win32_NetworkManagement_NetworkDiagnosticsFramework" = [ "Win32_NetworkManagement" ];
+          "Win32_NetworkManagement_P2P" = [ "Win32_NetworkManagement" ];
+          "Win32_NetworkManagement_QoS" = [ "Win32_NetworkManagement" ];
+          "Win32_NetworkManagement_Rras" = [ "Win32_NetworkManagement" ];
+          "Win32_NetworkManagement_Snmp" = [ "Win32_NetworkManagement" ];
+          "Win32_NetworkManagement_WNet" = [ "Win32_NetworkManagement" ];
+          "Win32_NetworkManagement_WebDav" = [ "Win32_NetworkManagement" ];
+          "Win32_NetworkManagement_WiFi" = [ "Win32_NetworkManagement" ];
+          "Win32_NetworkManagement_WindowsConnectionManager" = [ "Win32_NetworkManagement" ];
+          "Win32_NetworkManagement_WindowsFilteringPlatform" = [ "Win32_NetworkManagement" ];
+          "Win32_NetworkManagement_WindowsFirewall" = [ "Win32_NetworkManagement" ];
+          "Win32_NetworkManagement_WindowsNetworkVirtualization" = [ "Win32_NetworkManagement" ];
+          "Win32_Networking" = [ "Win32" ];
+          "Win32_Networking_ActiveDirectory" = [ "Win32_Networking" ];
+          "Win32_Networking_Clustering" = [ "Win32_Networking" ];
+          "Win32_Networking_HttpServer" = [ "Win32_Networking" ];
+          "Win32_Networking_Ldap" = [ "Win32_Networking" ];
+          "Win32_Networking_WebSocket" = [ "Win32_Networking" ];
+          "Win32_Networking_WinHttp" = [ "Win32_Networking" ];
+          "Win32_Networking_WinInet" = [ "Win32_Networking" ];
+          "Win32_Networking_WinSock" = [ "Win32_Networking" ];
+          "Win32_Networking_WindowsWebServices" = [ "Win32_Networking" ];
+          "Win32_Security" = [ "Win32" ];
+          "Win32_Security_AppLocker" = [ "Win32_Security" ];
+          "Win32_Security_Authentication" = [ "Win32_Security" ];
+          "Win32_Security_Authentication_Identity" = [ "Win32_Security_Authentication" ];
+          "Win32_Security_Authorization" = [ "Win32_Security" ];
+          "Win32_Security_Credentials" = [ "Win32_Security" ];
+          "Win32_Security_Cryptography" = [ "Win32_Security" ];
+          "Win32_Security_Cryptography_Catalog" = [ "Win32_Security_Cryptography" ];
+          "Win32_Security_Cryptography_Certificates" = [ "Win32_Security_Cryptography" ];
+          "Win32_Security_Cryptography_Sip" = [ "Win32_Security_Cryptography" ];
+          "Win32_Security_Cryptography_UI" = [ "Win32_Security_Cryptography" ];
+          "Win32_Security_DiagnosticDataQuery" = [ "Win32_Security" ];
+          "Win32_Security_DirectoryServices" = [ "Win32_Security" ];
+          "Win32_Security_EnterpriseData" = [ "Win32_Security" ];
+          "Win32_Security_ExtensibleAuthenticationProtocol" = [ "Win32_Security" ];
+          "Win32_Security_Isolation" = [ "Win32_Security" ];
+          "Win32_Security_LicenseProtection" = [ "Win32_Security" ];
+          "Win32_Security_NetworkAccessProtection" = [ "Win32_Security" ];
+          "Win32_Security_WinTrust" = [ "Win32_Security" ];
+          "Win32_Security_WinWlx" = [ "Win32_Security" ];
+          "Win32_Storage" = [ "Win32" ];
+          "Win32_Storage_Cabinets" = [ "Win32_Storage" ];
+          "Win32_Storage_CloudFilters" = [ "Win32_Storage" ];
+          "Win32_Storage_Compression" = [ "Win32_Storage" ];
+          "Win32_Storage_DistributedFileSystem" = [ "Win32_Storage" ];
+          "Win32_Storage_FileHistory" = [ "Win32_Storage" ];
+          "Win32_Storage_FileSystem" = [ "Win32_Storage" ];
+          "Win32_Storage_Imapi" = [ "Win32_Storage" ];
+          "Win32_Storage_IndexServer" = [ "Win32_Storage" ];
+          "Win32_Storage_InstallableFileSystems" = [ "Win32_Storage" ];
+          "Win32_Storage_IscsiDisc" = [ "Win32_Storage" ];
+          "Win32_Storage_Jet" = [ "Win32_Storage" ];
+          "Win32_Storage_Nvme" = [ "Win32_Storage" ];
+          "Win32_Storage_OfflineFiles" = [ "Win32_Storage" ];
+          "Win32_Storage_OperationRecorder" = [ "Win32_Storage" ];
+          "Win32_Storage_Packaging" = [ "Win32_Storage" ];
+          "Win32_Storage_Packaging_Appx" = [ "Win32_Storage_Packaging" ];
+          "Win32_Storage_ProjectedFileSystem" = [ "Win32_Storage" ];
+          "Win32_Storage_StructuredStorage" = [ "Win32_Storage" ];
+          "Win32_Storage_Vhd" = [ "Win32_Storage" ];
+          "Win32_Storage_Xps" = [ "Win32_Storage" ];
+          "Win32_System" = [ "Win32" ];
+          "Win32_System_AddressBook" = [ "Win32_System" ];
+          "Win32_System_Antimalware" = [ "Win32_System" ];
+          "Win32_System_ApplicationInstallationAndServicing" = [ "Win32_System" ];
+          "Win32_System_ApplicationVerifier" = [ "Win32_System" ];
+          "Win32_System_ClrHosting" = [ "Win32_System" ];
+          "Win32_System_Com" = [ "Win32_System" ];
+          "Win32_System_Com_Marshal" = [ "Win32_System_Com" ];
+          "Win32_System_Com_StructuredStorage" = [ "Win32_System_Com" ];
+          "Win32_System_Com_Urlmon" = [ "Win32_System_Com" ];
+          "Win32_System_ComponentServices" = [ "Win32_System" ];
+          "Win32_System_Console" = [ "Win32_System" ];
+          "Win32_System_CorrelationVector" = [ "Win32_System" ];
+          "Win32_System_DataExchange" = [ "Win32_System" ];
+          "Win32_System_DeploymentServices" = [ "Win32_System" ];
+          "Win32_System_DeveloperLicensing" = [ "Win32_System" ];
+          "Win32_System_Diagnostics" = [ "Win32_System" ];
+          "Win32_System_Diagnostics_Ceip" = [ "Win32_System_Diagnostics" ];
+          "Win32_System_Diagnostics_Debug" = [ "Win32_System_Diagnostics" ];
+          "Win32_System_Diagnostics_Debug_Extensions" = [ "Win32_System_Diagnostics_Debug" ];
+          "Win32_System_Diagnostics_Etw" = [ "Win32_System_Diagnostics" ];
+          "Win32_System_Diagnostics_ProcessSnapshotting" = [ "Win32_System_Diagnostics" ];
+          "Win32_System_Diagnostics_ToolHelp" = [ "Win32_System_Diagnostics" ];
+          "Win32_System_DistributedTransactionCoordinator" = [ "Win32_System" ];
+          "Win32_System_Environment" = [ "Win32_System" ];
+          "Win32_System_ErrorReporting" = [ "Win32_System" ];
+          "Win32_System_EventCollector" = [ "Win32_System" ];
+          "Win32_System_EventLog" = [ "Win32_System" ];
+          "Win32_System_EventNotificationService" = [ "Win32_System" ];
+          "Win32_System_GroupPolicy" = [ "Win32_System" ];
+          "Win32_System_HostCompute" = [ "Win32_System" ];
+          "Win32_System_HostComputeNetwork" = [ "Win32_System" ];
+          "Win32_System_HostComputeSystem" = [ "Win32_System" ];
+          "Win32_System_Hypervisor" = [ "Win32_System" ];
+          "Win32_System_IO" = [ "Win32_System" ];
+          "Win32_System_Iis" = [ "Win32_System" ];
+          "Win32_System_Ioctl" = [ "Win32_System" ];
+          "Win32_System_JobObjects" = [ "Win32_System" ];
+          "Win32_System_Js" = [ "Win32_System" ];
+          "Win32_System_Kernel" = [ "Win32_System" ];
+          "Win32_System_LibraryLoader" = [ "Win32_System" ];
+          "Win32_System_Mailslots" = [ "Win32_System" ];
+          "Win32_System_Mapi" = [ "Win32_System" ];
+          "Win32_System_Memory" = [ "Win32_System" ];
+          "Win32_System_Memory_NonVolatile" = [ "Win32_System_Memory" ];
+          "Win32_System_MessageQueuing" = [ "Win32_System" ];
+          "Win32_System_MixedReality" = [ "Win32_System" ];
+          "Win32_System_Ole" = [ "Win32_System" ];
+          "Win32_System_PasswordManagement" = [ "Win32_System" ];
+          "Win32_System_Performance" = [ "Win32_System" ];
+          "Win32_System_Performance_HardwareCounterProfiling" = [ "Win32_System_Performance" ];
+          "Win32_System_Pipes" = [ "Win32_System" ];
+          "Win32_System_Power" = [ "Win32_System" ];
+          "Win32_System_ProcessStatus" = [ "Win32_System" ];
+          "Win32_System_Recovery" = [ "Win32_System" ];
+          "Win32_System_Registry" = [ "Win32_System" ];
+          "Win32_System_RemoteDesktop" = [ "Win32_System" ];
+          "Win32_System_RemoteManagement" = [ "Win32_System" ];
+          "Win32_System_RestartManager" = [ "Win32_System" ];
+          "Win32_System_Restore" = [ "Win32_System" ];
+          "Win32_System_Rpc" = [ "Win32_System" ];
+          "Win32_System_Search" = [ "Win32_System" ];
+          "Win32_System_Search_Common" = [ "Win32_System_Search" ];
+          "Win32_System_SecurityCenter" = [ "Win32_System" ];
+          "Win32_System_Services" = [ "Win32_System" ];
+          "Win32_System_SetupAndMigration" = [ "Win32_System" ];
+          "Win32_System_Shutdown" = [ "Win32_System" ];
+          "Win32_System_StationsAndDesktops" = [ "Win32_System" ];
+          "Win32_System_SubsystemForLinux" = [ "Win32_System" ];
+          "Win32_System_SystemInformation" = [ "Win32_System" ];
+          "Win32_System_SystemServices" = [ "Win32_System" ];
+          "Win32_System_Threading" = [ "Win32_System" ];
+          "Win32_System_Time" = [ "Win32_System" ];
+          "Win32_System_TpmBaseServices" = [ "Win32_System" ];
+          "Win32_System_UserAccessLogging" = [ "Win32_System" ];
+          "Win32_System_Variant" = [ "Win32_System" ];
+          "Win32_System_VirtualDosMachines" = [ "Win32_System" ];
+          "Win32_System_WindowsProgramming" = [ "Win32_System" ];
+          "Win32_System_Wmi" = [ "Win32_System" ];
+          "Win32_UI" = [ "Win32" ];
+          "Win32_UI_Accessibility" = [ "Win32_UI" ];
+          "Win32_UI_ColorSystem" = [ "Win32_UI" ];
+          "Win32_UI_Controls" = [ "Win32_UI" ];
+          "Win32_UI_Controls_Dialogs" = [ "Win32_UI_Controls" ];
+          "Win32_UI_HiDpi" = [ "Win32_UI" ];
+          "Win32_UI_Input" = [ "Win32_UI" ];
+          "Win32_UI_Input_Ime" = [ "Win32_UI_Input" ];
+          "Win32_UI_Input_KeyboardAndMouse" = [ "Win32_UI_Input" ];
+          "Win32_UI_Input_Pointer" = [ "Win32_UI_Input" ];
+          "Win32_UI_Input_Touch" = [ "Win32_UI_Input" ];
+          "Win32_UI_Input_XboxController" = [ "Win32_UI_Input" ];
+          "Win32_UI_InteractionContext" = [ "Win32_UI" ];
+          "Win32_UI_Magnification" = [ "Win32_UI" ];
+          "Win32_UI_Shell" = [ "Win32_UI" ];
+          "Win32_UI_Shell_PropertiesSystem" = [ "Win32_UI_Shell" ];
+          "Win32_UI_TabletPC" = [ "Win32_UI" ];
+          "Win32_UI_TextServices" = [ "Win32_UI" ];
+          "Win32_UI_WindowsAndMessaging" = [ "Win32_UI" ];
+          "Win32_Web" = [ "Win32" ];
+          "Win32_Web_InternetExplorer" = [ "Win32_Web" ];
+        };
+        resolvedDefaultFeatures = [ "Wdk" "Wdk_Foundation" "Wdk_Storage" "Wdk_Storage_FileSystem" "Wdk_System" "Wdk_System_IO" "Win32" "Win32_Foundation" "Win32_NetworkManagement" "Win32_NetworkManagement_IpHelper" "Win32_Networking" "Win32_Networking_WinSock" "Win32_Security" "Win32_Security_Authentication" "Win32_Security_Authentication_Identity" "Win32_Security_Credentials" "Win32_Security_Cryptography" "Win32_Storage" "Win32_Storage_FileSystem" "Win32_System" "Win32_System_Com" "Win32_System_Console" "Win32_System_Diagnostics" "Win32_System_Diagnostics_Debug" "Win32_System_IO" "Win32_System_LibraryLoader" "Win32_System_Memory" "Win32_System_Pipes" "Win32_System_SystemServices" "Win32_System_Threading" "Win32_System_WindowsProgramming" "Win32_UI" "Win32_UI_Input" "Win32_UI_Input_KeyboardAndMouse" "Win32_UI_Shell" "default" ];
+      };
+      "windows-sys 0.59.0" = rec {
+        crateName = "windows-sys";
+        version = "0.59.0";
+        edition = "2021";
+        sha256 = "0fw5672ziw8b3zpmnbp9pdv1famk74f1l9fcbc3zsrzdg56vqf0y";
+        libName = "windows_sys";
         authors = [
           "Microsoft"
         ];
         dependencies = [
           {
             name = "windows-targets";
-            packageId = "windows-targets 0.52.0";
+            packageId = "windows-targets 0.52.6";
           }
         ];
         features = {
+          "Wdk" = [ "Win32_Foundation" ];
+          "Wdk_Devices" = [ "Wdk" ];
+          "Wdk_Devices_Bluetooth" = [ "Wdk_Devices" ];
+          "Wdk_Devices_HumanInterfaceDevice" = [ "Wdk_Devices" ];
           "Wdk_Foundation" = [ "Wdk" ];
           "Wdk_Graphics" = [ "Wdk" ];
           "Wdk_Graphics_Direct3D" = [ "Wdk_Graphics" ];
+          "Wdk_NetworkManagement" = [ "Wdk" ];
+          "Wdk_NetworkManagement_Ndis" = [ "Wdk_NetworkManagement" ];
+          "Wdk_NetworkManagement_WindowsFilteringPlatform" = [ "Wdk_NetworkManagement" ];
           "Wdk_Storage" = [ "Wdk" ];
           "Wdk_Storage_FileSystem" = [ "Wdk_Storage" ];
           "Wdk_Storage_FileSystem_Minifilters" = [ "Wdk_Storage_FileSystem" ];
           "Wdk_System" = [ "Wdk" ];
           "Wdk_System_IO" = [ "Wdk_System" ];
+          "Wdk_System_Memory" = [ "Wdk_System" ];
           "Wdk_System_OfflineRegistry" = [ "Wdk_System" ];
           "Wdk_System_Registry" = [ "Wdk_System" ];
           "Wdk_System_SystemInformation" = [ "Wdk_System" ];
           "Wdk_System_SystemServices" = [ "Wdk_System" ];
           "Wdk_System_Threading" = [ "Wdk_System" ];
+          "Win32" = [ "Win32_Foundation" ];
           "Win32_Data" = [ "Win32" ];
           "Win32_Data_HtmlHelp" = [ "Win32_Data" ];
           "Win32_Data_RightsManagement" = [ "Win32_Data" ];
@@ -15646,6 +18915,7 @@ rec {
           "Win32_System_Diagnostics_Etw" = [ "Win32_System_Diagnostics" ];
           "Win32_System_Diagnostics_ProcessSnapshotting" = [ "Win32_System_Diagnostics" ];
           "Win32_System_Diagnostics_ToolHelp" = [ "Win32_System_Diagnostics" ];
+          "Win32_System_Diagnostics_TraceLogging" = [ "Win32_System_Diagnostics" ];
           "Win32_System_DistributedTransactionCoordinator" = [ "Win32_System" ];
           "Win32_System_Environment" = [ "Win32_System" ];
           "Win32_System_ErrorReporting" = [ "Win32_System" ];
@@ -15717,6 +18987,7 @@ rec {
           "Win32_UI_InteractionContext" = [ "Win32_UI" ];
           "Win32_UI_Magnification" = [ "Win32_UI" ];
           "Win32_UI_Shell" = [ "Win32_UI" ];
+          "Win32_UI_Shell_Common" = [ "Win32_UI_Shell" ];
           "Win32_UI_Shell_PropertiesSystem" = [ "Win32_UI_Shell" ];
           "Win32_UI_TabletPC" = [ "Win32_UI" ];
           "Win32_UI_TextServices" = [ "Win32_UI" ];
@@ -15724,13 +18995,14 @@ rec {
           "Win32_Web" = [ "Win32" ];
           "Win32_Web_InternetExplorer" = [ "Win32_Web" ];
         };
-        resolvedDefaultFeatures = [ "Wdk" "Wdk_Foundation" "Wdk_Storage" "Wdk_Storage_FileSystem" "Win32" "Win32_Foundation" "Win32_NetworkManagement" "Win32_NetworkManagement_IpHelper" "Win32_Networking" "Win32_Networking_WinSock" "Win32_Security" "Win32_Security_Authentication" "Win32_Security_Authentication_Identity" "Win32_Security_Credentials" "Win32_Security_Cryptography" "Win32_Storage" "Win32_Storage_FileSystem" "Win32_System" "Win32_System_Com" "Win32_System_Console" "Win32_System_Diagnostics" "Win32_System_Diagnostics_Debug" "Win32_System_IO" "Win32_System_LibraryLoader" "Win32_System_Memory" "Win32_System_Threading" "Win32_System_WindowsProgramming" "Win32_UI" "Win32_UI_Shell" "default" ];
+        resolvedDefaultFeatures = [ "Win32" "Win32_Foundation" "Win32_Storage" "Win32_Storage_FileSystem" "Win32_System" "Win32_System_Threading" "default" ];
       };
       "windows-targets 0.48.5" = rec {
         crateName = "windows-targets";
         version = "0.48.5";
         edition = "2018";
         sha256 = "034ljxqshifs1lan89xwpcy1hp0lhdh4b5n0d2z4fwjx2piacbws";
+        libName = "windows_targets";
         authors = [
           "Microsoft"
         ];
@@ -15738,7 +19010,7 @@ rec {
           {
             name = "windows_aarch64_gnullvm";
             packageId = "windows_aarch64_gnullvm 0.48.5";
-            target = { target, features }: (pkgs.rust.lib.toRustTarget stdenv.hostPlatform == "aarch64-pc-windows-gnullvm");
+            target = { target, features }: (stdenv.hostPlatform.rust.rustcTarget == "aarch64-pc-windows-gnullvm");
           }
           {
             name = "windows_aarch64_msvc";
@@ -15763,7 +19035,7 @@ rec {
           {
             name = "windows_x86_64_gnullvm";
             packageId = "windows_x86_64_gnullvm 0.48.5";
-            target = { target, features }: (pkgs.rust.lib.toRustTarget stdenv.hostPlatform == "x86_64-pc-windows-gnullvm");
+            target = { target, features }: (stdenv.hostPlatform.rust.rustcTarget == "x86_64-pc-windows-gnullvm");
           }
           {
             name = "windows_x86_64_msvc";
@@ -15773,49 +19045,55 @@ rec {
         ];
 
       };
-      "windows-targets 0.52.0" = rec {
+      "windows-targets 0.52.6" = rec {
         crateName = "windows-targets";
-        version = "0.52.0";
+        version = "0.52.6";
         edition = "2021";
-        sha256 = "1kg7a27ynzw8zz3krdgy6w5gbqcji27j1sz4p7xk2j5j8082064a";
+        sha256 = "0wwrx625nwlfp7k93r2rra568gad1mwd888h1jwnl0vfg5r4ywlv";
+        libName = "windows_targets";
         authors = [
           "Microsoft"
         ];
         dependencies = [
           {
             name = "windows_aarch64_gnullvm";
-            packageId = "windows_aarch64_gnullvm 0.52.0";
-            target = { target, features }: (pkgs.rust.lib.toRustTarget stdenv.hostPlatform == "aarch64-pc-windows-gnullvm");
+            packageId = "windows_aarch64_gnullvm 0.52.6";
+            target = { target, features }: (stdenv.hostPlatform.rust.rustcTarget == "aarch64-pc-windows-gnullvm");
           }
           {
             name = "windows_aarch64_msvc";
-            packageId = "windows_aarch64_msvc 0.52.0";
+            packageId = "windows_aarch64_msvc 0.52.6";
             target = { target, features }: (("aarch64" == target."arch" or null) && ("msvc" == target."env" or null) && (!(target."windows_raw_dylib" or false)));
           }
           {
             name = "windows_i686_gnu";
-            packageId = "windows_i686_gnu 0.52.0";
-            target = { target, features }: (("x86" == target."arch" or null) && ("gnu" == target."env" or null) && (!(target."windows_raw_dylib" or false)));
+            packageId = "windows_i686_gnu 0.52.6";
+            target = { target, features }: (("x86" == target."arch" or null) && ("gnu" == target."env" or null) && (!("llvm" == target."abi" or null)) && (!(target."windows_raw_dylib" or false)));
+          }
+          {
+            name = "windows_i686_gnullvm";
+            packageId = "windows_i686_gnullvm";
+            target = { target, features }: (stdenv.hostPlatform.rust.rustcTarget == "i686-pc-windows-gnullvm");
           }
           {
             name = "windows_i686_msvc";
-            packageId = "windows_i686_msvc 0.52.0";
+            packageId = "windows_i686_msvc 0.52.6";
             target = { target, features }: (("x86" == target."arch" or null) && ("msvc" == target."env" or null) && (!(target."windows_raw_dylib" or false)));
           }
           {
             name = "windows_x86_64_gnu";
-            packageId = "windows_x86_64_gnu 0.52.0";
+            packageId = "windows_x86_64_gnu 0.52.6";
             target = { target, features }: (("x86_64" == target."arch" or null) && ("gnu" == target."env" or null) && (!("llvm" == target."abi" or null)) && (!(target."windows_raw_dylib" or false)));
           }
           {
             name = "windows_x86_64_gnullvm";
-            packageId = "windows_x86_64_gnullvm 0.52.0";
-            target = { target, features }: (pkgs.rust.lib.toRustTarget stdenv.hostPlatform == "x86_64-pc-windows-gnullvm");
+            packageId = "windows_x86_64_gnullvm 0.52.6";
+            target = { target, features }: (stdenv.hostPlatform.rust.rustcTarget == "x86_64-pc-windows-gnullvm");
           }
           {
             name = "windows_x86_64_msvc";
-            packageId = "windows_x86_64_msvc 0.52.0";
-            target = { target, features }: (("x86_64" == target."arch" or null) && ("msvc" == target."env" or null) && (!(target."windows_raw_dylib" or false)));
+            packageId = "windows_x86_64_msvc 0.52.6";
+            target = { target, features }: ((("x86_64" == target."arch" or null) || ("arm64ec" == target."arch" or null)) && ("msvc" == target."env" or null) && (!(target."windows_raw_dylib" or false)));
           }
         ];
 
@@ -15830,11 +19108,11 @@ rec {
         ];
 
       };
-      "windows_aarch64_gnullvm 0.52.0" = rec {
+      "windows_aarch64_gnullvm 0.52.6" = rec {
         crateName = "windows_aarch64_gnullvm";
-        version = "0.52.0";
+        version = "0.52.6";
         edition = "2021";
-        sha256 = "1shmn1kbdc0bpphcxz0vlph96bxz0h1jlmh93s9agf2dbpin8xyb";
+        sha256 = "1lrcq38cr2arvmz19v32qaggvj8bh1640mdm9c2fr877h0hn591j";
         authors = [
           "Microsoft"
         ];
@@ -15850,11 +19128,11 @@ rec {
         ];
 
       };
-      "windows_aarch64_msvc 0.52.0" = rec {
+      "windows_aarch64_msvc 0.52.6" = rec {
         crateName = "windows_aarch64_msvc";
-        version = "0.52.0";
+        version = "0.52.6";
         edition = "2021";
-        sha256 = "1vvmy1ypvzdvxn9yf0b8ygfl85gl2gpcyvsvqppsmlpisil07amv";
+        sha256 = "0sfl0nysnz32yyfh773hpi49b1q700ah6y7sacmjbqjjn5xjmv09";
         authors = [
           "Microsoft"
         ];
@@ -15870,11 +19148,21 @@ rec {
         ];
 
       };
-      "windows_i686_gnu 0.52.0" = rec {
+      "windows_i686_gnu 0.52.6" = rec {
         crateName = "windows_i686_gnu";
-        version = "0.52.0";
+        version = "0.52.6";
         edition = "2021";
-        sha256 = "04zkglz4p3pjsns5gbz85v4s5aw102raz4spj4b0lmm33z5kg1m2";
+        sha256 = "02zspglbykh1jh9pi7gn8g1f97jh1rrccni9ivmrfbl0mgamm6wf";
+        authors = [
+          "Microsoft"
+        ];
+
+      };
+      "windows_i686_gnullvm" = rec {
+        crateName = "windows_i686_gnullvm";
+        version = "0.52.6";
+        edition = "2021";
+        sha256 = "0rpdx1537mw6slcpqa0rm3qixmsb79nbhqy5fsm3q2q9ik9m5vhf";
         authors = [
           "Microsoft"
         ];
@@ -15890,11 +19178,11 @@ rec {
         ];
 
       };
-      "windows_i686_msvc 0.52.0" = rec {
+      "windows_i686_msvc 0.52.6" = rec {
         crateName = "windows_i686_msvc";
-        version = "0.52.0";
+        version = "0.52.6";
         edition = "2021";
-        sha256 = "16kvmbvx0vr0zbgnaz6nsks9ycvfh5xp05bjrhq65kj623iyirgz";
+        sha256 = "0rkcqmp4zzmfvrrrx01260q3xkpzi6fzi2x2pgdcdry50ny4h294";
         authors = [
           "Microsoft"
         ];
@@ -15910,11 +19198,11 @@ rec {
         ];
 
       };
-      "windows_x86_64_gnu 0.52.0" = rec {
+      "windows_x86_64_gnu 0.52.6" = rec {
         crateName = "windows_x86_64_gnu";
-        version = "0.52.0";
+        version = "0.52.6";
         edition = "2021";
-        sha256 = "1zdy4qn178sil5sdm63lm7f0kkcjg6gvdwmcprd2yjmwn8ns6vrx";
+        sha256 = "0y0sifqcb56a56mvn7xjgs8g43p33mfqkd8wj1yhrgxzma05qyhl";
         authors = [
           "Microsoft"
         ];
@@ -15930,11 +19218,11 @@ rec {
         ];
 
       };
-      "windows_x86_64_gnullvm 0.52.0" = rec {
+      "windows_x86_64_gnullvm 0.52.6" = rec {
         crateName = "windows_x86_64_gnullvm";
-        version = "0.52.0";
+        version = "0.52.6";
         edition = "2021";
-        sha256 = "17lllq4l2k1lqgcnw1cccphxp9vs7inq99kjlm2lfl9zklg7wr8s";
+        sha256 = "03gda7zjx1qh8k9nnlgb7m3w3s1xkysg55hkd1wjch8pqhyv5m94";
         authors = [
           "Microsoft"
         ];
@@ -15950,40 +19238,37 @@ rec {
         ];
 
       };
-      "windows_x86_64_msvc 0.52.0" = rec {
+      "windows_x86_64_msvc 0.52.6" = rec {
         crateName = "windows_x86_64_msvc";
-        version = "0.52.0";
+        version = "0.52.6";
         edition = "2021";
-        sha256 = "012wfq37f18c09ij5m6rniw7xxn5fcvrxbqd0wd8vgnl3hfn9yfz";
+        sha256 = "1v7rb5cibyzx8vak29pdrk8nx9hycsjs4w0jgms08qk49jl6v7sq";
         authors = [
           "Microsoft"
         ];
 
       };
-      "winreg" = rec {
-        crateName = "winreg";
-        version = "0.50.0";
-        edition = "2018";
-        sha256 = "1cddmp929k882mdh6i9f2as848f13qqna6czwsqzkh1pqnr5fkjj";
-        authors = [
-          "Igor Shaula <gentoo90@gmail.com>"
-        ];
+      "winnow" = rec {
+        crateName = "winnow";
+        version = "0.6.18";
+        edition = "2021";
+        sha256 = "0vrsrnf2nm9jsk1161x1vacmi3ns4h3h10fib91rs28zd6jbvab8";
         dependencies = [
           {
-            name = "cfg-if";
-            packageId = "cfg-if";
-          }
-          {
-            name = "windows-sys";
-            packageId = "windows-sys 0.48.0";
-            features = [ "Win32_Foundation" "Win32_System_Time" "Win32_System_Registry" "Win32_Security" "Win32_Storage_FileSystem" "Win32_System_Diagnostics_Debug" ];
+            name = "memchr";
+            packageId = "memchr";
+            optional = true;
+            usesDefaultFeatures = false;
           }
         ];
         features = {
-          "chrono" = [ "dep:chrono" ];
-          "serde" = [ "dep:serde" ];
-          "serialization-serde" = [ "transactions" "serde" ];
+          "debug" = [ "std" "dep:anstream" "dep:anstyle" "dep:is-terminal" "dep:terminal_size" ];
+          "default" = [ "std" ];
+          "simd" = [ "dep:memchr" ];
+          "std" = [ "alloc" "memchr?/std" ];
+          "unstable-doc" = [ "alloc" "std" "simd" "unstable-recover" ];
         };
+        resolvedDefaultFeatures = [ "alloc" "default" "std" ];
       };
       "wu-manber" = rec {
         crateName = "wu-manber";
@@ -15995,6 +19280,7 @@ rec {
           rev = "0d5b22bea136659f7de60b102a7030e0daaa503d";
           sha256 = "1zhk83lbq99xzyjwphv2qrb8f8qgfqwa5bbbvyzm0z0bljsjv0pd";
         };
+        libName = "wu_manber";
         authors = [
           "Joe Neeman <joeneeman@gmail.com>"
         ];
@@ -16041,18 +19327,6 @@ rec {
         };
         resolvedDefaultFeatures = [ "default" "unsupported" ];
       };
-      "xml-rs" = rec {
-        crateName = "xml-rs";
-        version = "0.8.19";
-        edition = "2021";
-        crateBin = [ ];
-        sha256 = "0nnpvk3fv32hgh7vs9gbg2swmzxx5yz73f4b7rak7q39q2x9rjqg";
-        libName = "xml";
-        authors = [
-          "Vladimir Matveev <vmatveev@citrine.cc>"
-        ];
-
-      };
       "xz2" = rec {
         crateName = "xz2";
         version = "0.1.7";
@@ -16126,6 +19400,7 @@ rec {
         edition = "2018";
         sha256 = "0fqvglw01w3hp7xj9gdk1800x9j7v58s9w8ijiyiz2a7krb39s8m";
         procMacro = true;
+        libName = "zerocopy_derive";
         authors = [
           "Joshua Liebow-Feeser <joshlf@google.com>"
         ];
@@ -16140,7 +19415,7 @@ rec {
           }
           {
             name = "syn";
-            packageId = "syn 2.0.48";
+            packageId = "syn 2.0.76";
           }
         ];
 
@@ -16164,9 +19439,9 @@ rec {
       };
       "zstd" = rec {
         crateName = "zstd";
-        version = "0.13.0";
+        version = "0.13.2";
         edition = "2018";
-        sha256 = "0401q54s9r35x2i7m1kwppgkj79g0pb6xz3xpby7qlkdb44k7yxz";
+        sha256 = "1ygkr6wspm9clbp7ykyl0rv69cfsf9q4lic9wcqiwn34lrwbgwpw";
         authors = [
           "Alexandre Bury <alexandre.bury@gmail.com>"
         ];
@@ -16197,9 +19472,10 @@ rec {
       };
       "zstd-safe" = rec {
         crateName = "zstd-safe";
-        version = "7.0.0";
+        version = "7.2.1";
         edition = "2018";
-        sha256 = "0gpav2lcibrpmyslmjkcn3w0w64qif3jjljd2h8lr4p249s7qx23";
+        sha256 = "0nch85m5cr493y26yvndm6a8j6sd9mxpr2awrim3dslcnr6sp8sl";
+        libName = "zstd_safe";
         authors = [
           "Alexandre Bury <alexandre.bury@gmail.com>"
         ];
@@ -16229,10 +19505,11 @@ rec {
       };
       "zstd-sys" = rec {
         crateName = "zstd-sys";
-        version = "2.0.9+zstd.1.5.5";
+        version = "2.0.13+zstd.1.5.6";
         edition = "2018";
         links = "zstd";
-        sha256 = "0mk6a2367swdi22zg03lcackpnvgq96d7120awd4i83lm2lfy5ly";
+        sha256 = "1almbackh06am0d2kc4a089n3al91jg3ahgg9kcrg3zfrwhhzzrq";
+        libName = "zstd_sys";
         authors = [
           "Alexandre Bury <alexandre.bury@gmail.com>"
         ];
@@ -16268,14 +19545,11 @@ rec {
       fuchsia = true;
       test = false;
 
-      /* We are choosing an arbitrary rust version to grab `lib` from,
-      which is unfortunate, but `lib` has been version-agnostic the
-      whole time so this is good enough for now.
-      */
-      os = pkgs.rust.lib.toTargetOs platform;
-      arch = pkgs.rust.lib.toTargetArch platform;
-      family = pkgs.rust.lib.toTargetFamily platform;
-      vendor = pkgs.rust.lib.toTargetVendor platform;
+      inherit (platform.rust.platform)
+        arch
+        os
+        vendor;
+      family = platform.rust.platform.target-family;
       env = "gnu";
       endian =
         if platform.parsed.cpu.significantByte.name == "littleEndian"
@@ -16365,52 +19639,41 @@ rec {
                 testPostRun
               ]);
           in
-          pkgs.runCommand "run-tests-${testCrate.name}"
-            {
-              inherit testCrateFlags;
-              buildInputs = testInputs;
-            } ''
-            set -e
+          pkgs.stdenvNoCC.mkDerivation {
+            name = "run-tests-${testCrate.name}";
 
-            export RUST_BACKTRACE=1
+            inherit (crate) src;
 
-            # recreate a file hierarchy as when running tests with cargo
+            inherit testCrateFlags;
 
-            # the source for test data
-            # It's necessary to locate the source in $NIX_BUILD_TOP/source/
-            # instead of $NIX_BUILD_TOP/
-            # because we compiled those test binaries in the former and not the latter.
-            # So all paths will expect source tree to be there and not in the build top directly.
-            # For example: $NIX_BUILD_TOP := /build in general, if you ask yourself.
-            # NOTE: There could be edge cases if `crate.sourceRoot` does exist but
-            # it's very hard to reason about them.
-            # Open a bug if you run into this!
-            mkdir -p source/
-            cd source/
+            buildInputs = testInputs;
 
-            ${pkgs.buildPackages.xorg.lndir}/bin/lndir ${crate.src}
+            buildPhase = ''
+              set -e
+              export RUST_BACKTRACE=1
 
-            # build outputs
-            testRoot=target/debug
-            mkdir -p $testRoot
+              # build outputs
+              testRoot=target/debug
+              mkdir -p $testRoot
 
-            # executables of the crate
-            # we copy to prevent std::env::current_exe() to resolve to a store location
-            for i in ${crate}/bin/*; do
-              cp "$i" "$testRoot"
-            done
-            chmod +w -R .
+              # executables of the crate
+              # we copy to prevent std::env::current_exe() from resolving to a store location
+              for i in ${crate}/bin/*; do
+                cp "$i" "$testRoot"
+              done
+              chmod +w -R .
 
-            # test harness executables are suffixed with a hash, like cargo does
-            # this allows to prevent name collision with the main
-            # executables of the crate
-            hash=$(basename $out)
-            for file in ${drv}/tests/*; do
-              f=$testRoot/$(basename $file)-$hash
-              cp $file $f
-              ${testCommand}
-            done
-          '';
+              # test harness executables are suffixed with a hash, like cargo does
+              # this prevents name collisions with the main
+              # executables of the crate
+              hash=$(basename $out)
+              for file in ${drv}/tests/*; do
+                f=$testRoot/$(basename $file)-$hash
+                cp $file $f
+                ${testCommand}
+              done
+            '';
+          };
       in
       pkgs.runCommand "${crate.name}-linked"
         {
@@ -16519,7 +19782,7 @@ rec {
             let
               self = {
                 crates = lib.mapAttrs (packageId: value: buildByPackageIdForPkgsImpl self pkgs packageId) crateConfigs;
-                target = makeTarget pkgs.stdenv.hostPlatform;
+                target = makeTarget stdenv.hostPlatform;
                 build = mkBuiltByPackageIdByPkgs pkgs.buildPackages;
               };
             in
@@ -16594,8 +19857,6 @@ rec {
             buildRustCrateForPkgsFunc pkgs
               (
                 crateConfig // {
-                  # https://github.com/NixOS/nixpkgs/issues/218712
-                  dontStrip = stdenv.hostPlatform.isDarwin;
                   src = crateConfig.src or (
                     pkgs.fetchurl rec {
                       name = "${crateConfig.crateName}-${crateConfig.version}.tar.gz";
diff --git a/tvix/Cargo.toml b/tvix/Cargo.toml
index 847d9aceeca0..7387bf2424f2 100644
--- a/tvix/Cargo.toml
+++ b/tvix/Cargo.toml
@@ -25,9 +25,13 @@ members = [
   "eval",
   "eval/builtin-macros",
   "glue",
+  "nar-bridge",
   "nix-compat",
+  "nix-compat-derive",
+  "nix-compat-derive-tests",
   "serde",
   "store",
+  "tracing",
 ]
 
 [workspace.lints.clippy]
@@ -35,6 +39,128 @@ members = [
 # https://github.com/rust-lang/rust-clippy/issues/12281
 blocks_in_conditions = "allow"
 
+[workspace.dependencies]
+anyhow = "1.0.86"
+async-compression = "0.4.12"
+async-process = "2.2.4"
+async-stream = "0.3.5"
+async-tempfile = "0.4.0"
+axum = "0.7.5"
+# https://github.com/liufuyang/bigtable_rs/pull/86
+bigtable_rs = { git = "https://github.com/liufuyang/bigtable_rs", rev = "1818355a5373a5bc2c84287e3a4e3807154ac8ef" }
+bitflags = "2.6.0"
+blake3 = "1.5.4"
+bstr = "1.10.0"
+bytes = "1.7.1"
+clap = "4.5.16"
+codemap = "0.1.3"
+codemap-diagnostic = "0.1.2"
+count-write = "0.1.0"
+criterion = "0.5"
+data-encoding = "2.6.0"
+digest = "0.10.7"
+dirs = "4.0.0"
+ed25519 = "2.2.3"
+ed25519-dalek = "2.1.1"
+enum-primitive-derive = "0.3.0"
+erased-serde = "0.4.5"
+expect-test = "1.5.0"
+fastcdc = "3.1.0"
+fuse-backend-rs = "0.11.0"
+futures = "0.3.30"
+genawaiter = { version = "0.99.1", default-features = false }
+glob = "0.3.1"
+hex-literal = "0.4.1"
+http = "1.1.0"
+hyper-util = "0.1.7"
+indicatif = "0.17.8"
+itertools = "0.12.1"
+lazy_static = "1.5.0"
+lexical-core = "0.8.5"
+libc = "0.2.158"
+lru = "0.12.4"
+magic = "0.16.2"
+md-5 = "0.10.6"
+mimalloc = "0.1.43"
+nix = "0.27.1"
+nohash-hasher = "0.2.0"
+nom = "7.1.3"
+num-traits = "0.2.19"
+object_store = "0.10.2"
+opentelemetry = "0.24.0"
+opentelemetry-http = "0.13.0"
+opentelemetry-otlp = "0.17.0"
+opentelemetry_sdk = "0.24.1"
+os_str_bytes = "6.6"
+parking_lot = "0.12.3"
+path-clean = "0.1"
+petgraph = "0.6.5"
+pin-project = "1.1"
+pin-project-lite = "0.2.14"
+pretty_assertions = "1.4.0"
+proc-macro2 = "1.0.86"
+proptest = { version = "1.5.0", default-features = false }
+prost = "0.13.1"
+prost-build = "0.13.1"
+quote = "1.0.37"
+redb = "2.1.2"
+regex = "1.10.6"
+reqwest = { version = "0.12.7", default-features = false }
+reqwest-middleware = "0.3.3"
+reqwest-tracing = { version = "0.5.3", default-features = false }
+rnix = "0.11.0"
+rowan = "*"
+rstest = "0.19.0"
+rstest_reuse = "0.6.0"
+rustc-hash = "2.0.0"
+rustyline = "10.1.1"
+serde = "1.0.209"
+serde_json = "1.0"
+serde_qs = "0.12.0"
+serde_tagged = "0.3.0"
+serde_with = "3.9.0"
+sha1 = "0.10.6"
+sha2 = "0.10.8"
+sled = "0.34.7"
+smol_str = "0.2.2"
+tabwriter = "1.4"
+tempfile = "3.12.0"
+test-strategy = "0.2.1"
+thiserror = "1.0.63"
+threadpool = "1.8.1"
+tokio = "1.39.3"
+tokio-listener = "0.4.3"
+tokio-retry = "0.3.0"
+tokio-stream = "0.1.15"
+tokio-tar = "0.3.1"
+tokio-test = "0.4.4"
+tokio-util = "0.7.11"
+tonic = "0.12.2"
+tonic-build = "0.12.2"
+tonic-health = { version = "0.12.2", default-features = false }
+tonic-reflection = "0.12.2"
+tower = "0.4.13"
+tower-http = "0.5.2"
+tracing = "0.1.40"
+tracing-indicatif = "0.3.6"
+tracing-opentelemetry = "0.25.0"
+tracing-subscriber = "0.3.18"
+tracing-tracy = "0.11.2"
+trybuild = "1.0.99"
+url = "2.5.2"
+vhost = "0.6"
+vhost-user-backend = "0.8"
+virtio-bindings = "0.2.2"
+virtio-queue = "0.7"
+vm-memory = "0.10"
+vmm-sys-util = "0.11"
+vu128 = "1.1.0"
+walkdir = "2.5.0"
+# https://github.com/jneem/wu-manber/pull/1
+wu-manber = { git = "https://github.com/tvlfyi/wu-manber.git" }
+xattr = "1.3.1"
+zstd = "0.13.2"
+
 # Add a profile to all targets that enables release optimisations, but
 # retains debug symbols. This is great for use with
 # benchmarking/profiling tools.
diff --git a/tvix/README.md b/tvix/README.md
index fb536bc229c0..af12d6ff4b10 100644
--- a/tvix/README.md
+++ b/tvix/README.md
@@ -61,7 +61,7 @@ This folder contains the following components:
 * `//tvix/castore` - subtree storage/transfer in a content-addressed fashion
 * `//tvix/cli` - preliminary REPL & CLI implementation for Tvix
 * `//tvix/eval` - an implementation of the Nix programming language
-* `//tvix/nar-bridge-go` - a HTTP webserver providing a Nix HTTP Binary Cache interface in front of a tvix-store
+* `//tvix/nar-bridge` - an HTTP webserver providing a Nix HTTP Binary Cache interface in front of a tvix-store
 * `//tvix/nix-compat` - a Rust library for compatibility with C++ Nix, features like encodings and hashing schemes and formats
 * `//tvix/serde` - a Rust library for using the Nix language for app configuration
 * `//tvix/store` - a "filesystem" linking Nix store paths and metadata with the content-addressed layer
@@ -104,6 +104,11 @@ Rust projects under `//tvix`, be sure to run
 `mg run //tools:crate2nix-generate` in `//tvix` itself and commit the changes
 to the generated `Cargo.nix` file. This only applies to the full TVL checkout.
 
+When adding/removing a Cargo feature for a crate, you will want to add it to the
+feature powerset that gets tested in CI. Each crate has a default.nix with a
+`mkFeaturePowerset` invocation; modify its feature list to include/remove the feature.
+Note that "collection" features, such as `fs` for tvix-[ca]store, or `default`, should not be added.
+
 ## License structure
 
 All code implemented for Tvix is licensed under the GPL-3.0, with the
diff --git a/tvix/boot/README.md b/tvix/boot/README.md
index 9c7b722a7abb..b183285b2b37 100644
--- a/tvix/boot/README.md
+++ b/tvix/boot/README.md
@@ -31,21 +31,35 @@ the `tvix` directory:
 export PATH=$PATH:$PWD/target/release-with-debug
 ```
 
-Secondly, configure tvix to use the local backend:
+Now, spin up the tvix-store daemon, connecting it to some (local) backends:
 
 ```
-export BLOB_SERVICE_ADDR=sled://$PWD/blobs.sled
-export DIRECTORY_SERVICE_ADDR=sled://$PWD/directories.sled
-export PATH_INFO_SERVICE_ADDR=sled://$PWD/pathinfo.sled
+tvix-store --otlp=false daemon \
+  --blob-service-addr=objectstore+file://$PWD/blobs \
+  --directory-service-addr=sled://$PWD/directories.sled \
+  --path-info-service-addr=sled://$PWD/pathinfo.sled &
 ```
 
-Potentially copy some data into tvix-store (via nar-bridge):
+Copy some data into tvix-store (for now, we use `nar-bridge` for this):
 
 ```
-mg run //tvix:store -- daemon &
-$(mg build //tvix:nar-bridge-go)/bin/nar-bridge-http &
+mg run //tvix:nar-bridge -- --otlp=false &
 rm -Rf ~/.cache/nix; nix copy --to http://localhost:9000\?compression\=none $(mg build //third_party/nixpkgs:hello)
-pkill nar-bridge-http; pkill tvix-store
+pkill nar-bridge
+```
+
+By default, the `tvix-store virtiofs` command used in the `runVM` script
+connects to a running `tvix-store daemon` via gRPC, in which case you want to
+keep the `tvix-store daemon` running.
+
+If you want `tvix-store virtiofs` to open the stores directly instead, kill
+the `tvix-store daemon` too, and export the addresses from above:
+
+```
+pkill tvix-store
+export BLOB_SERVICE_ADDR=objectstore+file://$PWD/blobs
+export DIRECTORY_SERVICE_ADDR=sled://$PWD/directories.sled
+export PATH_INFO_SERVICE_ADDR=sled://$PWD/pathinfo.sled
 ```
 
 #### Interactive shell
@@ -100,9 +114,12 @@ Hello, world!
 [    0.299422] reboot: Power down
 ```
 
-#### Execute a NixOS system closure
-It's also possible to invoke a system closure. To do this, tvix-init honors the
-init= cmdline option, and will switch_root to it.
+#### Boot a NixOS system closure
+It's also possible to boot a system closure. To do this, tvix-init honors the
+init= cmdline option, and will `switch_root` to it.
+
+Make sure to first copy that system closure into tvix-store,
+using a `nix copy` command similar to the one above.
 
 
 ```
diff --git a/tvix/boot/default.nix b/tvix/boot/default.nix
index 85995ffbf2c5..60eb2f1b7608 100644
--- a/tvix/boot/default.nix
+++ b/tvix/boot/default.nix
@@ -1,13 +1,16 @@
-{ depot, pkgs, ... }:
+{ lib, pkgs, ... }:
 
 rec {
   # A binary that sets up /nix/store from virtiofs, lists all store paths, and
   # powers off the machine.
-  tvix-init = depot.nix.buildGo.program {
+  tvix-init = pkgs.buildGoModule rec {
     name = "tvix-init";
-    srcs = [
-      ./tvix-init.go
-    ];
+    src = lib.fileset.toSource {
+      root = ./.;
+      fileset = ./tvix-init.go;
+    };
+    vendorHash = null;
+    postPatch = "go mod init ${name}";
   };
 
   # A kernel with virtiofs support baked in
@@ -99,7 +102,7 @@ rec {
      --memory mergeable=on,shared=on,size=$CH_MEM_SIZE \
      --console null \
      --serial tty \
-     --kernel ${kernel.dev}/vmlinux \
+     --kernel ${kernel}/${pkgs.stdenv.hostPlatform.linux-kernel.target} \
      --initramfs ${initrd} \
      --cmdline "console=ttyS0 $CH_CMDLINE" \
      --fs tag=tvix,socket=$tempdir/tvix.sock,num_queues=''${CH_NUM_CPU},queue_size=512
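
The `tvix-init` derivation above moves from depot's `buildGo` to upstream `buildGoModule` for a single-file Go program: `lib.fileset.toSource` narrows the source to just `tvix-init.go`, `vendorHash = null` skips vendoring (there are no third-party modules), and `postPatch` synthesizes the `go.mod`. A sketch of the same pattern for a hypothetical one-file tool:

```
pkgs.buildGoModule rec {
  name = "hello-go";                  # hypothetical tool name
  src = lib.fileset.toSource {
    root = ./.;
    fileset = ./hello.go;             # only this file lands in the source
  };
  vendorHash = null;                  # no dependencies to vendor
  postPatch = "go mod init ${name}";  # create go.mod at build time
}
```
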
diff --git a/tvix/boot/tests/default.nix b/tvix/boot/tests/default.nix
index 5c7f97a1cea3..97477572078e 100644
--- a/tvix/boot/tests/default.nix
+++ b/tvix/boot/tests/default.nix
@@ -16,8 +16,17 @@ let
       # Whether the path should be imported as a closure.
       # If false, importPathName must be specified.
     , isClosure ? false
+      # Whether to use nar-bridge to upload, rather than tvix-store copy.
+      # Using nar-bridge is currently "slower", as the `pkgs.mkBinaryCache` build
+      # takes quite some time.
+    , useNarBridge ? false
+
     , importPathName ? null
 
+      # Commands to run before starting the tvix daemon. Useful for providing
+      # auxiliary mock services.
+    , preStart ? ""
+
       # The cmdline to pass to the VM.
       # Defaults to tvix.find, which lists all files in the store.
     , vmCmdline ? "tvix.find"
@@ -29,29 +38,35 @@ let
       assert isClosure -> importPathName == null;
       assert (!isClosure) -> importPathName != null;
 
-      pkgs.stdenv.mkDerivation {
+      pkgs.stdenv.mkDerivation ({
         name = "run-vm";
 
-        __structuredAttrs = true;
-        exportReferencesGraph.closure = [ path ];
-
         nativeBuildInputs = [
           depot.tvix.store
           depot.tvix.boot.runVM
+        ] ++ lib.optionals (isClosure && useNarBridge) [
+          depot.tvix.nar-bridge
+          pkgs.curl
+          pkgs.parallel
+          pkgs.xz.bin
         ];
         buildCommand = ''
           touch $out
+          # Ensure we can construct http clients.
+          export SSL_CERT_FILE=/dev/null
+
+          ${preStart}
 
           # Start the tvix daemon, listening on a unix socket.
-          BLOB_SERVICE_ADDR=${blobServiceAddr} \
-            DIRECTORY_SERVICE_ADDR=${directoryServiceAddr} \
-            PATH_INFO_SERVICE_ADDR=${pathInfoServiceAddr} \
+          BLOB_SERVICE_ADDR=${lib.escapeShellArg blobServiceAddr} \
+          DIRECTORY_SERVICE_ADDR=${lib.escapeShellArg directoryServiceAddr} \
+          PATH_INFO_SERVICE_ADDR=${lib.escapeShellArg pathInfoServiceAddr} \
             tvix-store \
               --otlp=false \
               daemon -l $PWD/tvix-store.sock &
 
-          # Wait for the socket to be created.
-          while [ ! -e $PWD/tvix-store.sock ]; do sleep 1; done
+          # Wait for the service to report healthy.
+          timeout 22 sh -c "until ${pkgs.ip2unix}/bin/ip2unix -r out,path=$PWD/tvix-store.sock ${pkgs.grpc-health-check}/bin/grpc-health-check --address 127.0.0.1 --port 8080; do sleep 1; done"
 
           # Export env vars so that subsequent tvix-store commands will talk to
           # our tvix-store daemon over the unix socket.
@@ -64,10 +79,38 @@ let
           outpath=$(tvix-store import ${importPathName})
 
           echo "imported to $outpath"
-        '' + lib.optionalString (isClosure) ''
+        '' + lib.optionalString (isClosure && !useNarBridge) ''
           echo "Copying closure ${path}โ€ฆ"
           # This picks up the `closure` key in `$NIX_ATTRS_JSON_FILE` automatically.
           tvix-store --otlp=false copy
+        '' + lib.optionalString (isClosure && useNarBridge) ''
+          echo "Starting nar-bridgeโ€ฆ"
+          nar-bridge \
+            --otlp=false \
+            -l $PWD/nar-bridge.sock &
+
+          # Wait for nar-bridge to report healthy.
+          timeout 22 sh -c "until ${pkgs.curl}/bin/curl -s --unix-socket $PWD/nar-bridge.sock http:///nix-binary-cache; do sleep 1; done"
+
+          # Upload. We can't use nix copy --to http://…, as it wants access to the nix db.
+          # However, we can use mkBinaryCache to assemble .narinfo and .nar.xz to upload,
+          # and then drive an HTTP client ourselves.
+          to_upload=${pkgs.mkBinaryCache { rootPaths = [path];}}
+
+          # Upload all NAR files (with some parallelism).
+          # As mkBinaryCache produces them xz-compressed, unpack them on the fly.
+          # nar-bridge doesn't care about the path we upload *to*, but a
+          # subsequent .narinfo upload needs to refer to its contents (by narhash).
+          echo -e "Uploading NARsโ€ฆ "
+          ls -d $to_upload/nar/*.nar.xz | parallel 'xz -d < {} | curl -s -T - --unix-socket $PWD/nar-bridge.sock http://localhost:9000/nar/$(basename {} | cut -d "." -f 1).nar'
+          echo "Done."
+
+          # Upload all NARInfo files.
+          # FUTUREWORK: This doesn't upload them in order, and currently relies
+          # on PathInfoService not doing any checking.
+          # In the future, we might want to make this behaviour configurable,
+          # and disable checking here, to keep the logic simple.
+          ls -d $to_upload/*.narinfo | parallel 'curl -s -T - --unix-socket $PWD/nar-bridge.sock http://localhost:9000/$(basename {}) < {}'
         '' + ''
           # Invoke a VM using tvix as the backing store, ensure the outpath appears in its listing.
           echo "Starting VMโ€ฆ"
@@ -76,11 +119,20 @@ let
           grep "${assertVMOutput}" output.txt
         '';
         requiredSystemFeatures = [ "kvm" ];
-      };
-
-  systemFor = sys: (depot.ops.nixos.nixosFor sys).system;
+        # HACK: The boot tests are sometimes flaky, and we don't want them to
+        # periodically fail other builds. Have Buildkite auto-retry them 2 times
+        # on failure.
+        # Logs for individual failures are still available, so this doesn't
+        # hinder debugging the flakiness.
+        meta.ci.buildkiteExtraStepArgs = {
+          retry.automatic = true;
+        };
+      } // lib.optionalAttrs (isClosure && !useNarBridge) {
+        __structuredAttrs = true;
+        exportReferencesGraph.closure = [ path ];
+      });
 
-  testSystem = systemFor ({ modulesPath, pkgs, ... }: {
+  testSystem = (pkgs.nixos {
     # Set some options necessary to evaluate.
     boot.loader.systemd-boot.enable = true;
     # TODO: figure out how to disable this without causing eval to fail
@@ -100,7 +152,10 @@ let
 
     # Don't warn about stateVersion.
     system.stateVersion = "24.05";
-  });
+
+    # Speed-up evaluation and building.
+    documentation.enable = lib.mkForce false;
+  }).config.system.build.toplevel;
 
 in
 depot.nix.readTree.drvTargets
@@ -110,22 +165,74 @@ depot.nix.readTree.drvTargets
     importPathName = "docs";
   });
   docs-persistent = (mkBootTest {
-    blobServiceAddr = "objectstore+file://$PWD/blobs";
-    directoryServiceAddr = "sled://$PWD/directories.sled";
-    pathInfoServiceAddr = "sled://$PWD/pathinfo.sled";
+    blobServiceAddr = "objectstore+file:///build/blobs";
+    directoryServiceAddr = "redb:///build/directories.redb";
+    pathInfoServiceAddr = "redb:///build/pathinfo.redb";
     path = ../../docs;
     importPathName = "docs";
   });
 
   closure-tvix = (mkBootTest {
-    blobServiceAddr = "objectstore+file://$PWD/blobs";
+    blobServiceAddr = "objectstore+file:///build/blobs";
     path = depot.tvix.store;
     isClosure = true;
   });
 
   closure-nixos = (mkBootTest {
-    blobServiceAddr = "objectstore+file://$PWD/blobs";
+    blobServiceAddr = "objectstore+file:///build/blobs";
+    pathInfoServiceAddr = "redb:///build/pathinfo.redb";
+    directoryServiceAddr = "redb:///build/directories.redb";
+    path = testSystem;
+    isClosure = true;
+    vmCmdline = "init=${testSystem}/init panic=-1"; # reboot immediately on panic
+    assertVMOutput = "Onwards and upwards.";
+  });
+
+  closure-nixos-bigtable = (mkBootTest {
+    blobServiceAddr = "objectstore+file:///build/blobs";
+    directoryServiceAddr = "bigtable://instance-1?project_id=project-1&table_name=directories&family_name=cf1";
+    pathInfoServiceAddr = "bigtable://instance-1?project_id=project-1&table_name=pathinfos&family_name=cf1";
+    path = testSystem;
+    useNarBridge = true;
+    preStart = ''
+      ${pkgs.cbtemulator}/bin/cbtemulator -address $PWD/cbtemulator.sock &
+      timeout 22 sh -c 'until [ -e $PWD/cbtemulator.sock ]; do sleep 1; done'
+
+      export BIGTABLE_EMULATOR_HOST=unix://$PWD/cbtemulator.sock
+      ${pkgs.google-cloud-bigtable-tool}/bin/cbt -instance instance-1 -project project-1 createtable directories
+      ${pkgs.google-cloud-bigtable-tool}/bin/cbt -instance instance-1 -project project-1 createfamily directories cf1
+      ${pkgs.google-cloud-bigtable-tool}/bin/cbt -instance instance-1 -project project-1 createtable pathinfos
+      ${pkgs.google-cloud-bigtable-tool}/bin/cbt -instance instance-1 -project project-1 createfamily pathinfos cf1
+    '';
+    isClosure = true;
+    vmCmdline = "init=${testSystem}/init panic=-1"; # reboot immediately on panic
+    assertVMOutput = "Onwards and upwards.";
+  });
+
+  closure-nixos-s3 = (mkBootTest {
+    blobServiceAddr = "objectstore+s3://mybucket/blobs?aws_access_key_id=myaccesskey&aws_secret_access_key=supersecret&aws_endpoint_url=http%3A%2F%2Flocalhost%3A9000&aws_allow_http=1";
+    # we cannot use s3 here yet without any caching layer, as we don't allow "deeper" access to directories (non-root nodes)
+    # directoryServiceAddr = "objectstore+s3://mybucket/directories?aws_access_key_id=myaccesskey&aws_secret_access_key=supersecret&endpoint=http%3A%2F%2Flocalhost%3A9000&aws_allow_http=1";
+    directoryServiceAddr = "memory://";
+    pathInfoServiceAddr = "memory://";
+    path = testSystem;
+    useNarBridge = true;
+    preStart = ''
+      MINIO_ACCESS_KEY=myaccesskey MINIO_SECRET_KEY=supersecret MINIO_ADDRESS=127.0.0.1:9000 ${pkgs.minio}/bin/minio server $(mktemp -d) &
+      timeout 22 sh -c 'until ${pkgs.netcat}/bin/nc -z $0 $1; do sleep 1; done' localhost 9000
+      mc_config_dir=$(mktemp -d)
+      ${pkgs.minio-client}/bin/mc --config-dir $mc_config_dir alias set 'myminio' 'http://127.0.0.1:9000' 'myaccesskey' 'supersecret'
+      ${pkgs.minio-client}/bin/mc --config-dir $mc_config_dir mb myminio/mybucket
+    '';
+    isClosure = true;
+    vmCmdline = "init=${testSystem}/init panic=-1"; # reboot immediately on panic
+    assertVMOutput = "Onwards and upwards.";
+  });
+
+  closure-nixos-nar-bridge = (mkBootTest {
+    blobServiceAddr = "objectstore+file:///build/blobs";
     path = testSystem;
+    useNarBridge = true;
     isClosure = true;
     vmCmdline = "init=${testSystem}/init panic=-1"; # reboot immediately on panic
     assertVMOutput = "Onwards and upwards.";
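
All of the tests above parameterize the `mkBootTest` helper with store-backend URLs; the schemes seen here include `memory://`, `sled://`, `redb://`, `objectstore+file://`, `objectstore+s3://` and `bigtable://`. A minimal invocation using purely in-memory backends (illustrative only; assumes the blob service accepts `memory://` like the other two, mirroring the `docs` test at the top of this file):

```
docs-memory = (mkBootTest {
  blobServiceAddr = "memory://";
  directoryServiceAddr = "memory://";
  pathInfoServiceAddr = "memory://";
  path = ../../docs;
  importPathName = "docs";
});
```
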
diff --git a/tvix/build-go/build.pb.go b/tvix/build-go/build.pb.go
index 9c6bd5f24883..c1fbf28b547a 100644
--- a/tvix/build-go/build.pb.go
+++ b/tvix/build-go/build.pb.go
@@ -3,7 +3,7 @@
 
 // Code generated by protoc-gen-go. DO NOT EDIT.
 // versions:
-// 	protoc-gen-go v1.33.0
+// 	protoc-gen-go v1.34.2
 // 	protoc        (unknown)
 // source: tvix/build/protos/build.proto
 
@@ -66,8 +66,8 @@ type BuildRequest struct {
 
 	// The list of all root nodes that should be visible in `inputs_dir` at the
 	// time of the build.
-	// As root nodes are content-addressed, no additional signatures are needed
-	// to substitute / make these available in the build environment.
+	// As all references are content-addressed, no additional signatures are
+	// needed to substitute / make these available in the build environment.
 	// Inputs MUST be sorted by their names.
 	Inputs []*castore_go.Node `protobuf:"bytes,1,rep,name=inputs,proto3" json:"inputs,omitempty"`
 	// The command (and its args) executed as the build script.
@@ -560,7 +560,7 @@ func file_tvix_build_protos_build_proto_rawDescGZIP() []byte {
 }
 
 var file_tvix_build_protos_build_proto_msgTypes = make([]protoimpl.MessageInfo, 5)
-var file_tvix_build_protos_build_proto_goTypes = []interface{}{
+var file_tvix_build_protos_build_proto_goTypes = []any{
 	(*BuildRequest)(nil),                  // 0: tvix.build.v1.BuildRequest
 	(*Build)(nil),                         // 1: tvix.build.v1.Build
 	(*BuildRequest_EnvVar)(nil),           // 2: tvix.build.v1.BuildRequest.EnvVar
@@ -588,7 +588,7 @@ func file_tvix_build_protos_build_proto_init() {
 		return
 	}
 	if !protoimpl.UnsafeEnabled {
-		file_tvix_build_protos_build_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} {
+		file_tvix_build_protos_build_proto_msgTypes[0].Exporter = func(v any, i int) any {
 			switch v := v.(*BuildRequest); i {
 			case 0:
 				return &v.state
@@ -600,7 +600,7 @@ func file_tvix_build_protos_build_proto_init() {
 				return nil
 			}
 		}
-		file_tvix_build_protos_build_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} {
+		file_tvix_build_protos_build_proto_msgTypes[1].Exporter = func(v any, i int) any {
 			switch v := v.(*Build); i {
 			case 0:
 				return &v.state
@@ -612,7 +612,7 @@ func file_tvix_build_protos_build_proto_init() {
 				return nil
 			}
 		}
-		file_tvix_build_protos_build_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} {
+		file_tvix_build_protos_build_proto_msgTypes[2].Exporter = func(v any, i int) any {
 			switch v := v.(*BuildRequest_EnvVar); i {
 			case 0:
 				return &v.state
@@ -624,7 +624,7 @@ func file_tvix_build_protos_build_proto_init() {
 				return nil
 			}
 		}
-		file_tvix_build_protos_build_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} {
+		file_tvix_build_protos_build_proto_msgTypes[3].Exporter = func(v any, i int) any {
 			switch v := v.(*BuildRequest_BuildConstraints); i {
 			case 0:
 				return &v.state
@@ -636,7 +636,7 @@ func file_tvix_build_protos_build_proto_init() {
 				return nil
 			}
 		}
-		file_tvix_build_protos_build_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} {
+		file_tvix_build_protos_build_proto_msgTypes[4].Exporter = func(v any, i int) any {
 			switch v := v.(*BuildRequest_AdditionalFile); i {
 			case 0:
 				return &v.state
diff --git a/tvix/build-go/rpc_build.pb.go b/tvix/build-go/rpc_build.pb.go
index aae0cd9d4776..b2f947362508 100644
--- a/tvix/build-go/rpc_build.pb.go
+++ b/tvix/build-go/rpc_build.pb.go
@@ -3,7 +3,7 @@
 
 // Code generated by protoc-gen-go. DO NOT EDIT.
 // versions:
-// 	protoc-gen-go v1.33.0
+// 	protoc-gen-go v1.34.2
 // 	protoc        (unknown)
 // source: tvix/build/protos/rpc_build.proto
 
@@ -40,7 +40,7 @@ var file_tvix_build_protos_rpc_build_proto_rawDesc = []byte{
 	0x69, 0x6c, 0x64, 0x76, 0x31, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
 }
 
-var file_tvix_build_protos_rpc_build_proto_goTypes = []interface{}{
+var file_tvix_build_protos_rpc_build_proto_goTypes = []any{
 	(*BuildRequest)(nil), // 0: tvix.build.v1.BuildRequest
 	(*Build)(nil),        // 1: tvix.build.v1.Build
 }
diff --git a/tvix/build/Cargo.toml b/tvix/build/Cargo.toml
index cf25465cca24..b9073b7ff61a 100644
--- a/tvix/build/Cargo.toml
+++ b/tvix/build/Cargo.toml
@@ -4,33 +4,31 @@ version = "0.1.0"
 edition = "2021"
 
 [dependencies]
-bytes = "1.4.0"
-clap = { version = "4.0", features = ["derive", "env"] }
-itertools = "0.12.0"
-prost = "0.12.1"
-thiserror = "1.0.56"
-tokio = { version = "1.32.0" }
-tokio-listener = { version = "0.4.1", features = [ "tonic011" ] }
-tonic = { version = "0.11.0", features = ["tls", "tls-roots"] }
+bytes = { workspace = true }
+clap = { workspace = true, features = ["derive", "env"] }
+itertools = { workspace = true }
+prost = { workspace = true }
+thiserror = { workspace = true }
+tokio = { workspace = true }
+tokio-listener = { workspace = true, features = ["tonic012"] }
+tonic = { workspace = true, features = ["tls", "tls-roots"] }
 tvix-castore = { path = "../castore" }
-tracing = "0.1.37"
-tracing-subscriber = "0.3.16"
-url = "2.4.0"
-
-[dependencies.tonic-reflection]
-optional = true
-version = "0.11.0"
+tvix-tracing = { path = "../tracing" }
+tracing = { workspace = true }
+url = { workspace = true }
+mimalloc = { workspace = true }
+tonic-reflection = { workspace = true, optional = true }
 
 [build-dependencies]
-prost-build = "0.12.1"
-tonic-build = "0.11.0"
+prost-build = { workspace = true }
+tonic-build = { workspace = true }
 
 [features]
 default = []
-tonic-reflection = ["dep:tonic-reflection"]
+tonic-reflection = ["dep:tonic-reflection", "tvix-castore/tonic-reflection"]
 
 [dev-dependencies]
-rstest = "0.19.0"
+rstest = { workspace = true }
 
 [lints]
 workspace = true
diff --git a/tvix/build/build.rs b/tvix/build/build.rs
index c3518ea8772b..fe230cbeca8a 100644
--- a/tvix/build/build.rs
+++ b/tvix/build/build.rs
@@ -12,24 +12,20 @@ fn main() -> Result<()> {
         builder = builder.file_descriptor_set_path(descriptor_path);
     };
 
-    // https://github.com/hyperium/tonic/issues/908
-    let mut config = prost_build::Config::new();
-    config.bytes(["."]);
-    config.extern_path(".tvix.castore.v1", "::tvix_castore::proto");
-
     builder
         .build_server(true)
         .build_client(true)
         .emit_rerun_if_changed(false)
-        .compile_with_config(
-            config,
+        .bytes(["."])
+        .extern_path(".tvix.castore.v1", "::tvix_castore::proto")
+        .compile(
             &[
                 "tvix/build/protos/build.proto",
                 "tvix/build/protos/rpc_build.proto",
             ],
             // If we are running `cargo build` manually, using `../..` works fine,
             // but in case we run inside a nix build, we need to instead point PROTO_ROOT
-            // to a sparseTree containing that structure.
+            // to a custom tree containing that structure.
             &[match std::env::var_os("PROTO_ROOT") {
                 Some(proto_root) => proto_root.to_str().unwrap().to_owned(),
                 None => "../..".to_string(),
diff --git a/tvix/build/default.nix b/tvix/build/default.nix
index a2a3bea0c5a5..17b52354bbeb 100644
--- a/tvix/build/default.nix
+++ b/tvix/build/default.nix
@@ -1,5 +1,11 @@
-{ depot, pkgs, ... }:
+{ depot, lib, ... }:
 
-depot.tvix.crates.workspaceMembers.tvix-build.build.override {
+(depot.tvix.crates.workspaceMembers.tvix-build.build.override {
   runTests = true;
-}
+}).overrideAttrs (old: rec {
+  meta.ci.targets = lib.filter (x: lib.hasPrefix "with-features" x || x == "no-features") (lib.attrNames passthru);
+  passthru = old.passthru // (depot.tvix.utils.mkFeaturePowerset {
+    inherit (old) crateName;
+    features = [ "tonic-reflection" ];
+  });
+})
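
Judging from the `meta.ci.targets` filter above, `mkFeaturePowerset` (a helper in `depot.tvix.utils`) emits one passthru attribute per feature subset, named with a `with-features` prefix, plus a `no-features` variant. A sketch of what the resulting attrset plausibly looks like for the single `tonic-reflection` feature (attribute shape assumed from the filter, not from the helper's source):

```
{
  "no-features" = build.override { features = [ ]; };
  "with-features-tonic-reflection" = build.override {
    features = [ "tonic-reflection" ];
  };
}
```
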
diff --git a/tvix/build/protos/build.proto b/tvix/build/protos/build.proto
index f1f6bf0b05d8..7a3c49db4873 100644
--- a/tvix/build/protos/build.proto
+++ b/tvix/build/protos/build.proto
@@ -47,8 +47,8 @@ option go_package = "code.tvl.fyi/tvix/build-go;buildv1";
 message BuildRequest {
   // The list of all root nodes that should be visible in `inputs_dir` at the
   // time of the build.
-  // As root nodes are content-addressed, no additional signatures are needed
-  // to substitute / make these available in the build environment.
+  // As all references are content-addressed, no additional signatures are
+  // needed to substitute / make these available in the build environment.
   // Inputs MUST be sorted by their names.
   repeated tvix.castore.v1.Node inputs = 1;
 
diff --git a/tvix/build/protos/default.nix b/tvix/build/protos/default.nix
index 790655ac7522..287b513136c9 100644
--- a/tvix/build/protos/default.nix
+++ b/tvix/build/protos/default.nix
@@ -1,17 +1,12 @@
-{ depot, pkgs, ... }:
+{ depot, pkgs, lib, ... }:
 let
-  protos = depot.nix.sparseTree {
-    name = "build-protos";
-    root = depot.path.origSrc;
-    paths = [
-      # We need to include castore.proto (only), as it's referred.
-      ../../castore/protos/castore.proto
-      ./build.proto
-      ./rpc_build.proto
-      ../../../buf.yaml
-      ../../../buf.gen.yaml
-    ];
-  };
+  protos = lib.sourceByRegex depot.path.origSrc [
+    "buf.yaml"
+    "buf.gen.yaml"
+    # We need to include castore.proto (only), as it's referred.
+    "^tvix(/castore(/protos(/castore\.proto)?)?)?$"
+    "^tvix(/build(/protos(/.*\.proto)?)?)?$"
+  ];
 in
 depot.nix.readTree.drvTargets {
   inherit protos;
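
`lib.sourceByRegex` only descends into a directory if the directory's own relative path matches one of the regexes, which is why the patterns above nest optional groups: `^tvix(/castore(/protos(/castore\.proto)?)?)?$` admits `tvix`, `tvix/castore`, `tvix/castore/protos`, and finally the single proto file. A minimal sketch of the same trick on a hypothetical tree:

```
# Keep only foo/bar/baz.proto (plus the directories leading to it).
lib.sourceByRegex ./. [
  "^foo(/bar(/baz\\.proto)?)?$"
]
```
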
diff --git a/tvix/build/src/bin/tvix-build.rs b/tvix/build/src/bin/tvix-build.rs
index 07d7e30dfda5..b840e031db80 100644
--- a/tvix/build/src/bin/tvix-build.rs
+++ b/tvix/build/src/bin/tvix-build.rs
@@ -1,5 +1,3 @@
-use std::sync::Arc;
-
 use clap::Parser;
 use clap::Subcommand;
 use tokio_listener::Listener;
@@ -7,7 +5,6 @@ use tokio_listener::SystemOptions;
 use tokio_listener::UserOptions;
 use tonic::{self, transport::Server};
 use tracing::{info, Level};
-use tracing_subscriber::prelude::*;
 use tvix_build::{
     buildservice,
     proto::{build_service_server::BuildServiceServer, GRPCBuildServiceWrapper},
@@ -20,11 +17,20 @@ use tvix_build::proto::FILE_DESCRIPTOR_SET;
 #[cfg(feature = "tonic-reflection")]
 use tvix_castore::proto::FILE_DESCRIPTOR_SET as CASTORE_FILE_DESCRIPTOR_SET;
 
+use mimalloc::MiMalloc;
+
+#[global_allocator]
+static GLOBAL: MiMalloc = MiMalloc;
+
 #[derive(Parser)]
 #[command(author, version, about, long_about = None)]
 struct Cli {
-    #[arg(long)]
-    log_level: Option<Level>,
+    /// A global log level to use when printing logs.
+    /// It's also possible to set `RUST_LOG` according to
+    /// `tracing_subscriber::filter::EnvFilter`, which will always have
+    /// priority.
+    #[arg(long, default_value_t=Level::INFO)]
+    log_level: Level,
 
     #[command(subcommand)]
     command: Commands,
@@ -48,19 +54,12 @@ enum Commands {
 }
 
 #[tokio::main]
-async fn main() -> Result<(), Box<dyn std::error::Error>> {
+async fn main() -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
     let cli = Cli::parse();
 
-    // configure log settings
-    let level = cli.log_level.unwrap_or(Level::INFO);
-
-    tracing_subscriber::registry()
-        .with(
-            tracing_subscriber::fmt::Layer::new()
-                .with_writer(std::io::stderr.with_max_level(level))
-                .pretty(),
-        )
-        .init();
+    let _ = tvix_tracing::TracingBuilder::default()
+        .level(cli.log_level)
+        .enable_progressbar();
 
     match cli.command {
         Commands::Daemon {
@@ -73,12 +72,9 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
             let blob_service = blobservice::from_addr(&blob_service_addr).await?;
             let directory_service = directoryservice::from_addr(&directory_service_addr).await?;
 
-            let build_service = buildservice::from_addr(
-                &build_service_addr,
-                Arc::from(blob_service),
-                Arc::from(directory_service),
-            )
-            .await?;
+            let build_service =
+                buildservice::from_addr(&build_service_addr, blob_service, directory_service)
+                    .await?;
 
             let listen_address = listen_address
                 .unwrap_or_else(|| "[::]:8000".to_string())
@@ -94,11 +90,18 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
 
             #[cfg(feature = "tonic-reflection")]
             {
-                let reflection_svc = tonic_reflection::server::Builder::configure()
-                    .register_encoded_file_descriptor_set(CASTORE_FILE_DESCRIPTOR_SET)
-                    .register_encoded_file_descriptor_set(FILE_DESCRIPTOR_SET)
-                    .build()?;
-                router = router.add_service(reflection_svc);
+                router = router.add_service(
+                    tonic_reflection::server::Builder::configure()
+                        .register_encoded_file_descriptor_set(CASTORE_FILE_DESCRIPTOR_SET)
+                        .register_encoded_file_descriptor_set(FILE_DESCRIPTOR_SET)
+                        .build_v1alpha()?,
+                );
+                router = router.add_service(
+                    tonic_reflection::server::Builder::configure()
+                        .register_encoded_file_descriptor_set(CASTORE_FILE_DESCRIPTOR_SET)
+                        .register_encoded_file_descriptor_set(FILE_DESCRIPTOR_SET)
+                        .build_v1()?,
+                );
             }
 
             info!(listen_address=%listen_address, "listening");
diff --git a/tvix/build/src/proto/mod.rs b/tvix/build/src/proto/mod.rs
index e359b5b5b70e..b36049d05b9d 100644
--- a/tvix/build/src/proto/mod.rs
+++ b/tvix/build/src/proto/mod.rs
@@ -1,7 +1,7 @@
 use std::path::{Path, PathBuf};
 
 use itertools::Itertools;
-use tvix_castore::proto::{NamedNode, ValidateNodeError};
+use tvix_castore::DirectoryError;
 
 mod grpc_buildservice_wrapper;
 
@@ -19,7 +19,7 @@ pub const FILE_DESCRIPTOR_SET: &[u8] = tonic::include_file_descriptor_set!("tvix
 #[derive(Debug, thiserror::Error)]
 pub enum ValidateBuildRequestError {
     #[error("invalid input node at position {0}: {1}")]
-    InvalidInputNode(usize, ValidateNodeError),
+    InvalidInputNode(usize, DirectoryError),
 
     #[error("input nodes are not sorted by name")]
     InputNodesNotSorted,
@@ -123,20 +123,21 @@ impl BuildRequest {
     /// and all restrictions around paths themselves (relative, clean, …) need
     // to be fulfilled.
     pub fn validate(&self) -> Result<(), ValidateBuildRequestError> {
-        // validate all input nodes
-        for (i, n) in self.inputs.iter().enumerate() {
-            // ensure the input node itself is valid
-            n.validate()
+        // Validate the names, and make sure they're sorted.
+
+        let mut last_name: bytes::Bytes = "".into();
+        for (i, node) in self.inputs.iter().enumerate() {
+            // TODO(flokli): store result somewhere
+            let (name, _node) = node
+                .clone()
+                .into_name_and_node()
                 .map_err(|e| ValidateBuildRequestError::InvalidInputNode(i, e))?;
-        }
 
-        // now we can look at the names, and make sure they're sorted.
-        if !is_sorted(
-            self.inputs
-                .iter()
-                .map(|e| e.node.as_ref().unwrap().get_name()),
-        ) {
-            Err(ValidateBuildRequestError::InputNodesNotSorted)?
+            if name.as_ref() <= last_name.as_ref() {
+                return Err(ValidateBuildRequestError::InputNodesNotSorted);
+            } else {
+                last_name = name.into()
+            }
         }
 
         // validate working_dir
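The rewritten loop folds the two previous passes (per-node validation, then a separate sortedness check) into one, and rejects duplicates as well: each name must compare strictly greater than its predecessor, and the initial empty `last_name` also rules out an empty first name. A minimal standalone sketch of the same ordering check (a hypothetical helper, not part of the change itself):

```rust
/// Hypothetical helper mirroring the strict-ordering check in
/// BuildRequest::validate above.
fn names_strictly_sorted<'a>(names: impl IntoIterator<Item = &'a [u8]>) -> bool {
    let mut last: &[u8] = b"";
    for name in names {
        // Equal or out-of-order names (and an empty first name) are rejected.
        if name <= last {
            return false;
        }
        last = name;
    }
    true
}
```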
diff --git a/tvix/castore-go/castore.pb.go b/tvix/castore-go/castore.pb.go
index 464f1d4a4183..1adfbe34d6b0 100644
--- a/tvix/castore-go/castore.pb.go
+++ b/tvix/castore-go/castore.pb.go
@@ -3,7 +3,7 @@
 
 // Code generated by protoc-gen-go. DO NOT EDIT.
 // versions:
-// 	protoc-gen-go v1.33.0
+// 	protoc-gen-go v1.34.2
 // 	protoc        (unknown)
 // source: tvix/castore/protos/castore.proto
 
@@ -466,7 +466,7 @@ func file_tvix_castore_protos_castore_proto_rawDescGZIP() []byte {
 }
 
 var file_tvix_castore_protos_castore_proto_msgTypes = make([]protoimpl.MessageInfo, 5)
-var file_tvix_castore_protos_castore_proto_goTypes = []interface{}{
+var file_tvix_castore_protos_castore_proto_goTypes = []any{
 	(*Directory)(nil),     // 0: tvix.castore.v1.Directory
 	(*DirectoryNode)(nil), // 1: tvix.castore.v1.DirectoryNode
 	(*FileNode)(nil),      // 2: tvix.castore.v1.FileNode
@@ -493,7 +493,7 @@ func file_tvix_castore_protos_castore_proto_init() {
 		return
 	}
 	if !protoimpl.UnsafeEnabled {
-		file_tvix_castore_protos_castore_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} {
+		file_tvix_castore_protos_castore_proto_msgTypes[0].Exporter = func(v any, i int) any {
 			switch v := v.(*Directory); i {
 			case 0:
 				return &v.state
@@ -505,7 +505,7 @@ func file_tvix_castore_protos_castore_proto_init() {
 				return nil
 			}
 		}
-		file_tvix_castore_protos_castore_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} {
+		file_tvix_castore_protos_castore_proto_msgTypes[1].Exporter = func(v any, i int) any {
 			switch v := v.(*DirectoryNode); i {
 			case 0:
 				return &v.state
@@ -517,7 +517,7 @@ func file_tvix_castore_protos_castore_proto_init() {
 				return nil
 			}
 		}
-		file_tvix_castore_protos_castore_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} {
+		file_tvix_castore_protos_castore_proto_msgTypes[2].Exporter = func(v any, i int) any {
 			switch v := v.(*FileNode); i {
 			case 0:
 				return &v.state
@@ -529,7 +529,7 @@ func file_tvix_castore_protos_castore_proto_init() {
 				return nil
 			}
 		}
-		file_tvix_castore_protos_castore_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} {
+		file_tvix_castore_protos_castore_proto_msgTypes[3].Exporter = func(v any, i int) any {
 			switch v := v.(*SymlinkNode); i {
 			case 0:
 				return &v.state
@@ -541,7 +541,7 @@ func file_tvix_castore_protos_castore_proto_init() {
 				return nil
 			}
 		}
-		file_tvix_castore_protos_castore_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} {
+		file_tvix_castore_protos_castore_proto_msgTypes[4].Exporter = func(v any, i int) any {
 			switch v := v.(*Node); i {
 			case 0:
 				return &v.state
@@ -554,7 +554,7 @@ func file_tvix_castore_protos_castore_proto_init() {
 			}
 		}
 	}
-	file_tvix_castore_protos_castore_proto_msgTypes[4].OneofWrappers = []interface{}{
+	file_tvix_castore_protos_castore_proto_msgTypes[4].OneofWrappers = []any{
 		(*Node_Directory)(nil),
 		(*Node_File)(nil),
 		(*Node_Symlink)(nil),
diff --git a/tvix/castore-go/rpc_blobstore.pb.go b/tvix/castore-go/rpc_blobstore.pb.go
index 3607a65bbe20..a6ee6748b868 100644
--- a/tvix/castore-go/rpc_blobstore.pb.go
+++ b/tvix/castore-go/rpc_blobstore.pb.go
@@ -3,7 +3,7 @@
 
 // Code generated by protoc-gen-go. DO NOT EDIT.
 // versions:
-// 	protoc-gen-go v1.33.0
+// 	protoc-gen-go v1.34.2
 // 	protoc        (unknown)
 // source: tvix/castore/protos/rpc_blobstore.proto
 
@@ -415,7 +415,7 @@ func file_tvix_castore_protos_rpc_blobstore_proto_rawDescGZIP() []byte {
 }
 
 var file_tvix_castore_protos_rpc_blobstore_proto_msgTypes = make([]protoimpl.MessageInfo, 6)
-var file_tvix_castore_protos_rpc_blobstore_proto_goTypes = []interface{}{
+var file_tvix_castore_protos_rpc_blobstore_proto_goTypes = []any{
 	(*StatBlobRequest)(nil),            // 0: tvix.castore.v1.StatBlobRequest
 	(*StatBlobResponse)(nil),           // 1: tvix.castore.v1.StatBlobResponse
 	(*ReadBlobRequest)(nil),            // 2: tvix.castore.v1.ReadBlobRequest
@@ -444,7 +444,7 @@ func file_tvix_castore_protos_rpc_blobstore_proto_init() {
 		return
 	}
 	if !protoimpl.UnsafeEnabled {
-		file_tvix_castore_protos_rpc_blobstore_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} {
+		file_tvix_castore_protos_rpc_blobstore_proto_msgTypes[0].Exporter = func(v any, i int) any {
 			switch v := v.(*StatBlobRequest); i {
 			case 0:
 				return &v.state
@@ -456,7 +456,7 @@ func file_tvix_castore_protos_rpc_blobstore_proto_init() {
 				return nil
 			}
 		}
-		file_tvix_castore_protos_rpc_blobstore_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} {
+		file_tvix_castore_protos_rpc_blobstore_proto_msgTypes[1].Exporter = func(v any, i int) any {
 			switch v := v.(*StatBlobResponse); i {
 			case 0:
 				return &v.state
@@ -468,7 +468,7 @@ func file_tvix_castore_protos_rpc_blobstore_proto_init() {
 				return nil
 			}
 		}
-		file_tvix_castore_protos_rpc_blobstore_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} {
+		file_tvix_castore_protos_rpc_blobstore_proto_msgTypes[2].Exporter = func(v any, i int) any {
 			switch v := v.(*ReadBlobRequest); i {
 			case 0:
 				return &v.state
@@ -480,7 +480,7 @@ func file_tvix_castore_protos_rpc_blobstore_proto_init() {
 				return nil
 			}
 		}
-		file_tvix_castore_protos_rpc_blobstore_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} {
+		file_tvix_castore_protos_rpc_blobstore_proto_msgTypes[3].Exporter = func(v any, i int) any {
 			switch v := v.(*BlobChunk); i {
 			case 0:
 				return &v.state
@@ -492,7 +492,7 @@ func file_tvix_castore_protos_rpc_blobstore_proto_init() {
 				return nil
 			}
 		}
-		file_tvix_castore_protos_rpc_blobstore_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} {
+		file_tvix_castore_protos_rpc_blobstore_proto_msgTypes[4].Exporter = func(v any, i int) any {
 			switch v := v.(*PutBlobResponse); i {
 			case 0:
 				return &v.state
@@ -504,7 +504,7 @@ func file_tvix_castore_protos_rpc_blobstore_proto_init() {
 				return nil
 			}
 		}
-		file_tvix_castore_protos_rpc_blobstore_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} {
+		file_tvix_castore_protos_rpc_blobstore_proto_msgTypes[5].Exporter = func(v any, i int) any {
 			switch v := v.(*StatBlobResponse_ChunkMeta); i {
 			case 0:
 				return &v.state
diff --git a/tvix/castore-go/rpc_directory.pb.go b/tvix/castore-go/rpc_directory.pb.go
index 78c4a243e3ff..ca86909538c5 100644
--- a/tvix/castore-go/rpc_directory.pb.go
+++ b/tvix/castore-go/rpc_directory.pb.go
@@ -3,7 +3,7 @@
 
 // Code generated by protoc-gen-go. DO NOT EDIT.
 // versions:
-// 	protoc-gen-go v1.33.0
+// 	protoc-gen-go v1.34.2
 // 	protoc        (unknown)
 // source: tvix/castore/protos/rpc_directory.proto
 
@@ -199,7 +199,7 @@ func file_tvix_castore_protos_rpc_directory_proto_rawDescGZIP() []byte {
 }
 
 var file_tvix_castore_protos_rpc_directory_proto_msgTypes = make([]protoimpl.MessageInfo, 2)
-var file_tvix_castore_protos_rpc_directory_proto_goTypes = []interface{}{
+var file_tvix_castore_protos_rpc_directory_proto_goTypes = []any{
 	(*GetDirectoryRequest)(nil),  // 0: tvix.castore.v1.GetDirectoryRequest
 	(*PutDirectoryResponse)(nil), // 1: tvix.castore.v1.PutDirectoryResponse
 	(*Directory)(nil),            // 2: tvix.castore.v1.Directory
@@ -223,7 +223,7 @@ func file_tvix_castore_protos_rpc_directory_proto_init() {
 	}
 	file_tvix_castore_protos_castore_proto_init()
 	if !protoimpl.UnsafeEnabled {
-		file_tvix_castore_protos_rpc_directory_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} {
+		file_tvix_castore_protos_rpc_directory_proto_msgTypes[0].Exporter = func(v any, i int) any {
 			switch v := v.(*GetDirectoryRequest); i {
 			case 0:
 				return &v.state
@@ -235,7 +235,7 @@ func file_tvix_castore_protos_rpc_directory_proto_init() {
 				return nil
 			}
 		}
-		file_tvix_castore_protos_rpc_directory_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} {
+		file_tvix_castore_protos_rpc_directory_proto_msgTypes[1].Exporter = func(v any, i int) any {
 			switch v := v.(*PutDirectoryResponse); i {
 			case 0:
 				return &v.state
@@ -248,7 +248,7 @@ func file_tvix_castore_protos_rpc_directory_proto_init() {
 			}
 		}
 	}
-	file_tvix_castore_protos_rpc_directory_proto_msgTypes[0].OneofWrappers = []interface{}{
+	file_tvix_castore_protos_rpc_directory_proto_msgTypes[0].OneofWrappers = []any{
 		(*GetDirectoryRequest_Digest)(nil),
 	}
 	type x struct{}
diff --git a/tvix/castore/Cargo.toml b/tvix/castore/Cargo.toml
index 4cbc29053b22..b6752ae25af5 100644
--- a/tvix/castore/Cargo.toml
+++ b/tvix/castore/Cargo.toml
@@ -4,102 +4,78 @@ version = "0.1.0"
 edition = "2021"
 
 [dependencies]
-async-compression = { version = "0.4.9", features = ["tokio", "zstd"]}
-async-stream = "0.3.5"
-async-tempfile = "0.4.0"
-blake3 = { version = "1.3.1", features = ["rayon", "std", "traits-preview"] }
-bstr = "1.6.0"
-bytes = "1.4.0"
-data-encoding = "2.3.3"
-digest = "0.10.7"
-fastcdc = { version = "3.1.0", features = ["tokio"] }
-futures = "0.3.30"
-lazy_static = "1.4.0"
-object_store = { version = "0.9.1", features = ["http"] }
-parking_lot = "0.12.1"
-pin-project-lite = "0.2.13"
-prost = "0.12.1"
-sled = { version = "0.34.7" }
-thiserror = "1.0.38"
-tokio-stream = { version = "0.1.14", features = ["fs", "net"] }
-tokio-util = { version = "0.7.9", features = ["io", "io-util", "codec"] }
-tokio-tar = "0.3.1"
-tokio = { version = "1.32.0", features = ["fs", "macros", "net", "rt", "rt-multi-thread", "signal"] }
-tonic = "0.11.0"
-tower = "0.4.13"
-tracing = "0.1.37"
-url = "2.4.0"
-walkdir = "2.4.0"
-zstd = "0.13.0"
-serde = { version = "1.0.197", features = [ "derive" ] }
-serde_with = "3.7.0"
-serde_qs = "0.12.0"
-petgraph = "0.6.4"
-
-[dependencies.bigtable_rs]
-optional = true
-# https://github.com/liufuyang/bigtable_rs/pull/72
-git = "https://github.com/flokli/bigtable_rs"
-rev = "0af404741dfc40eb9fa99cf4d4140a09c5c20df7"
-
-[dependencies.fuse-backend-rs]
-optional = true
-version = "0.11.0"
-
-[dependencies.libc]
-optional = true
-version = "0.2.144"
-
-[dependencies.tonic-reflection]
-optional = true
-version = "0.11.0"
-
-[dependencies.vhost]
-optional = true
-version = "0.6"
-
-[dependencies.vhost-user-backend]
-optional = true
-version = "0.8"
-
-[dependencies.virtio-queue]
-optional = true
-version = "0.7"
-
-[dependencies.vm-memory]
-optional = true
-version = "0.10"
-
-[dependencies.vmm-sys-util]
-optional = true
-version = "0.11"
-
-[dependencies.virtio-bindings]
-optional = true
-version = "0.2.1"
+async-compression = { workspace = true, features = ["tokio", "zstd"] }
+async-stream = { workspace = true }
+async-tempfile = { workspace = true }
+blake3 = { workspace = true, features = ["rayon", "std", "traits-preview"] }
+bstr = { workspace = true }
+bytes = { workspace = true }
+data-encoding = { workspace = true }
+digest = { workspace = true }
+fastcdc = { workspace = true, features = ["tokio"] }
+futures = { workspace = true }
+lazy_static = { workspace = true }
+object_store = { workspace = true, features = ["http"] }
+parking_lot = { workspace = true }
+pin-project-lite = { workspace = true }
+prost = { workspace = true }
+sled = { workspace = true }
+thiserror = { workspace = true }
+tokio-stream = { workspace = true, features = ["fs", "net"] }
+tokio-util = { workspace = true, features = ["io", "io-util", "codec"] }
+tokio-tar = { workspace = true }
+tokio = { workspace = true, features = ["fs", "macros", "net", "rt", "rt-multi-thread", "signal"] }
+tonic = { workspace = true }
+tower = { workspace = true }
+tracing = { workspace = true }
+tracing-indicatif = { workspace = true }
+tvix-tracing = { path = "../tracing", features = ["tonic"] }
+url = { workspace = true }
+walkdir = { workspace = true }
+zstd = { workspace = true }
+serde = { workspace = true, features = ["derive"] }
+serde_with = { workspace = true }
+serde_qs = { workspace = true }
+petgraph = { workspace = true }
+erased-serde = { workspace = true }
+serde_tagged = { workspace = true }
+hyper-util = { workspace = true }
+redb = { workspace = true }
+bigtable_rs = { workspace = true, optional = true }
+fuse-backend-rs = { workspace = true, optional = true }
+libc = { workspace = true, optional = true }
+threadpool = { workspace = true, optional = true }
+tonic-reflection = { workspace = true, optional = true }
+vhost = { workspace = true, optional = true }
+vhost-user-backend = { workspace = true, optional = true }
+virtio-queue = { workspace = true, optional = true }
+vm-memory = { workspace = true, optional = true }
+vmm-sys-util = { workspace = true, optional = true }
+virtio-bindings = { workspace = true, optional = true }
 
 [build-dependencies]
-prost-build = "0.12.1"
-tonic-build = "0.11.0"
+prost-build = { workspace = true }
+tonic-build = { workspace = true }
 
 [dev-dependencies]
-async-process = "2.1.0"
-rstest = "0.19.0"
-tempfile = "3.3.0"
-tokio-retry = "0.3.0"
-hex-literal = "0.4.1"
-rstest_reuse = "0.6.0"
-xattr = "1.3.1"
+async-process = { workspace = true }
+rstest = { workspace = true }
+tempfile = { workspace = true }
+tokio-retry = { workspace = true }
+hex-literal = { workspace = true }
+rstest_reuse = { workspace = true }
+xattr = { workspace = true }
+serde_json = { workspace = true }
 
 [features]
-default = []
+default = ["cloud"]
 cloud = [
   "dep:bigtable_rs",
   "object_store/aws",
   "object_store/azure",
   "object_store/gcp",
 ]
-fs = ["dep:libc", "dep:fuse-backend-rs"]
+fs = ["dep:fuse-backend-rs", "dep:threadpool", "dep:libc"]
 virtiofs = [
   "fs",
   "dep:vhost",
diff --git a/tvix/castore/build.rs b/tvix/castore/build.rs
index 089c093e71b4..a4591c845509 100644
--- a/tvix/castore/build.rs
+++ b/tvix/castore/build.rs
@@ -12,17 +12,13 @@ fn main() -> Result<()> {
         builder = builder.file_descriptor_set_path(descriptor_path);
     };
 
-    // https://github.com/hyperium/tonic/issues/908
-    let mut config = prost_build::Config::new();
-    config.bytes(["."]);
-    config.type_attribute(".", "#[derive(Eq, Hash)]");
-
     builder
         .build_server(true)
         .build_client(true)
         .emit_rerun_if_changed(false)
-        .compile_with_config(
-            config,
+        .bytes(["."])
+        .type_attribute(".", "#[derive(Eq, Hash)]")
+        .compile(
             &[
                 "tvix/castore/protos/castore.proto",
                 "tvix/castore/protos/rpc_blobstore.proto",
@@ -30,7 +26,7 @@ fn main() -> Result<()> {
             ],
             // If we are in running `cargo build` manually, using `../..` works fine,
             // but in case we run inside a nix build, we need to instead point PROTO_ROOT
-            // to a sparseTree containing that structure.
+            // to a custom tree containing that structure.
             &[match std::env::var_os("PROTO_ROOT") {
                 Some(proto_root) => proto_root.to_str().unwrap().to_owned(),
                 None => "../..".to_string(),
diff --git a/tvix/castore/default.nix b/tvix/castore/default.nix
index 641d88376072..9c210884f6e3 100644
--- a/tvix/castore/default.nix
+++ b/tvix/castore/default.nix
@@ -1,23 +1,28 @@
-{ depot, pkgs, ... }:
+{ depot, pkgs, lib, ... }:
 
 (depot.tvix.crates.workspaceMembers.tvix-castore.build.override {
   runTests = true;
   testPreRun = ''
-    export SSL_CERT_FILE=${pkgs.cacert}/etc/ssl/certs/ca-bundle.crt;
+    export SSL_CERT_FILE=/dev/null
   '';
-
-  # enable some optional features.
-  features = [ "default" "cloud" ];
-}).overrideAttrs (_: {
-  meta.ci.targets = [ "integration-tests" ];
-  passthru.integration-tests = depot.tvix.crates.workspaceMembers.tvix-castore.build.override {
-    runTests = true;
-    testPreRun = ''
-      export SSL_CERT_FILE=${pkgs.cacert}/etc/ssl/certs/ca-bundle.crt;
-      export PATH="$PATH:${pkgs.lib.makeBinPath [pkgs.cbtemulator pkgs.google-cloud-bigtable-tool]}"
+}).overrideAttrs (old: rec {
+  meta.ci.targets = [ "integration-tests" ] ++ lib.filter (x: lib.hasPrefix "with-features" x || x == "no-features") (lib.attrNames passthru);
+  passthru = (depot.tvix.utils.mkFeaturePowerset {
+    inherit (old) crateName;
+    features = ([ "cloud" "fuse" "tonic-reflection" ]
+      # virtiofs feature currently fails to build on Darwin
+      ++ lib.optional pkgs.stdenv.isLinux "virtiofs");
+    override.testPreRun = ''
+      export SSL_CERT_FILE=/dev/null
     '';
-
-    # enable some optional features.
-    features = [ "default" "cloud" "integration" ];
+  }) // {
+    integration-tests = depot.tvix.crates.workspaceMembers.${old.crateName}.build.override (old: {
+      runTests = true;
+      testPreRun = ''
+        export SSL_CERT_FILE=/dev/null
+        export PATH="$PATH:${pkgs.lib.makeBinPath [ pkgs.cbtemulator pkgs.google-cloud-bigtable-tool ]}"
+      '';
+      features = old.features ++ [ "integration" ];
+    });
   };
 })
diff --git a/tvix/castore/protos/default.nix b/tvix/castore/protos/default.nix
index feef55690fb9..08bb8fcfeef1 100644
--- a/tvix/castore/protos/default.nix
+++ b/tvix/castore/protos/default.nix
@@ -1,16 +1,10 @@
-{ depot, pkgs, ... }:
+{ depot, pkgs, lib, ... }:
 let
-  protos = depot.nix.sparseTree {
-    name = "castore-protos";
-    root = depot.path.origSrc;
-    paths = [
-      ./castore.proto
-      ./rpc_blobstore.proto
-      ./rpc_directory.proto
-      ../../../buf.yaml
-      ../../../buf.gen.yaml
-    ];
-  };
+  protos = lib.sourceByRegex depot.path.origSrc [
+    "buf.yaml"
+    "buf.gen.yaml"
+    "^tvix(/castore(/protos(/.*\.proto)?)?)?$"
+  ];
 in
 depot.nix.readTree.drvTargets {
   inherit protos;
diff --git a/tvix/castore/src/blobservice/combinator.rs b/tvix/castore/src/blobservice/combinator.rs
index 067eff96f488..6a964c8a8440 100644
--- a/tvix/castore/src/blobservice/combinator.rs
+++ b/tvix/castore/src/blobservice/combinator.rs
@@ -1,11 +1,12 @@
-use futures::{StreamExt, TryStreamExt};
-use tokio_util::io::{ReaderStream, StreamReader};
+use std::sync::Arc;
+
 use tonic::async_trait;
-use tracing::{instrument, warn};
+use tracing::instrument;
 
-use crate::B3Digest;
+use crate::composition::{CompositionContext, ServiceBuilder};
+use crate::{B3Digest, Error};
 
-use super::{naive_seeker::NaiveSeeker, BlobReader, BlobService, BlobWriter};
+use super::{BlobReader, BlobService, BlobWriter, ChunkedReader};
 
 /// Combinator for a BlobService, using a "local" and "remote" blobservice.
 /// Requests are tried in (and returned from) the local store first, only if
@@ -68,19 +69,16 @@ where
                     // otherwise, a chunked reader, which will always try the
                     // local backend first.
 
-                    // map Vec<ChunkMeta> to Vec<(B3Digest, u64)>
-                    let chunks: Vec<(B3Digest, u64)> = remote_chunks
-                        .into_iter()
-                        .map(|chunk_meta| {
+                    let chunked_reader = ChunkedReader::from_chunks(
+                        remote_chunks.into_iter().map(|chunk| {
                             (
-                                B3Digest::try_from(chunk_meta.digest)
-                                    .expect("invalid chunk digest"),
-                                chunk_meta.size,
+                                chunk.digest.try_into().expect("invalid b3 digest"),
+                                chunk.size,
                             )
-                        })
-                        .collect();
-
-                    Ok(Some(make_chunked_reader(self.clone(), chunks)))
+                        }),
+                        Arc::new(self.clone()) as Arc<dyn BlobService>,
+                    );
+                    Ok(Some(Box::new(chunked_reader)))
                 }
             }
         }
@@ -93,40 +91,38 @@ where
     }
 }
 
-fn make_chunked_reader<BS>(
-    // This must consume, as we can't retain references to blob_service,
-    // as it'd add a lifetime to BlobReader in general, which will get
-    // problematic in TvixStoreFs, which is using async move closures and cloning.
-    blob_service: BS,
-    // A list of b3 digests for individual chunks, and their sizes.
-    chunks: Vec<(B3Digest, u64)>,
-) -> Box<dyn BlobReader>
-where
-    BS: BlobService + Clone + 'static,
-{
-    // TODO: offset, verified streaming
-
-    // construct readers for each chunk
-    let blob_service = blob_service.clone();
-    let readers_stream = tokio_stream::iter(chunks).map(move |(digest, _)| {
-        let d = digest.to_owned();
-        let blob_service = blob_service.clone();
-        async move {
-            blob_service.open_read(&d.to_owned()).await?.ok_or_else(|| {
-                warn!(chunk.digest = %digest, "chunk not found");
-                std::io::Error::new(std::io::ErrorKind::NotFound, "chunk not found")
-            })
-        }
-    });
-
-    // convert the stream of readers to a stream of streams of byte chunks
-    let bytes_streams = readers_stream.then(|elem| async { elem.await.map(ReaderStream::new) });
-
-    // flatten into one stream of byte chunks
-    let bytes_stream = bytes_streams.try_flatten();
+#[derive(serde::Deserialize, Debug, Clone)]
+#[serde(deny_unknown_fields)]
+pub struct CombinedBlobServiceConfig {
+    local: String,
+    remote: String,
+}
 
-    // convert into AsyncRead
-    let blob_reader = StreamReader::new(bytes_stream);
+impl TryFrom<url::Url> for CombinedBlobServiceConfig {
+    type Error = Box<dyn std::error::Error + Send + Sync>;
+    fn try_from(_url: url::Url) -> Result<Self, Self::Error> {
+        Err(Error::StorageError(
+            "Instantiating a CombinedBlobService from a url is not supported".into(),
+        )
+        .into())
+    }
+}
 
-    Box::new(NaiveSeeker::new(Box::pin(blob_reader)))
+#[async_trait]
+impl ServiceBuilder for CombinedBlobServiceConfig {
+    type Output = dyn BlobService;
+    async fn build<'a>(
+        &'a self,
+        _instance_name: &str,
+        context: &CompositionContext,
+    ) -> Result<Arc<dyn BlobService>, Box<dyn std::error::Error + Send + Sync>> {
+        let (local, remote) = futures::join!(
+            context.resolve(self.local.clone()),
+            context.resolve(self.remote.clone())
+        );
+        Ok(Arc::new(CombinedBlobService {
+            local: local?,
+            remote: remote?,
+        }))
+    }
 }
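A CombinedBlobService can now only be built through the composition machinery (the URL route is explicitly rejected above), so a config entry references two other instances by name and resolves them through the CompositionContext. A hedged sketch of deserializing such an entry (the referenced instance names are hypothetical; serde_json is already a dev-dependency):

```rust
// Hedged sketch: field names follow CombinedBlobServiceConfig above; the
// referenced instance names "blobs-local"/"blobs-remote" are hypothetical.
let cfg: CombinedBlobServiceConfig = serde_json::from_str(
    r#"{ "local": "blobs-local", "remote": "blobs-remote" }"#,
)
.expect("valid combined blobservice config");
```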
diff --git a/tvix/castore/src/blobservice/from_addr.rs b/tvix/castore/src/blobservice/from_addr.rs
index 8898bbfb95ce..c5cabaa9d945 100644
--- a/tvix/castore/src/blobservice/from_addr.rs
+++ b/tvix/castore/src/blobservice/from_addr.rs
@@ -1,8 +1,12 @@
+use std::sync::Arc;
+
 use url::Url;
 
-use crate::{proto::blob_service_client::BlobServiceClient, Error};
+use crate::composition::{
+    with_registry, CompositionContext, DeserializeWithRegistry, ServiceBuilder, REG,
+};
 
-use super::{BlobService, GRPCBlobService, MemoryBlobService, ObjectStoreBlobService};
+use super::BlobService;
 
 /// Constructs a new instance of a [BlobService] from an URI.
 ///
@@ -12,46 +16,19 @@ use super::{BlobService, GRPCBlobService, MemoryBlobService, ObjectStoreBlobServ
 /// - `objectstore+*://` ([ObjectStoreBlobService])
 ///
 /// See their `from_url` methods for more details about their syntax.
-pub async fn from_addr(uri: &str) -> Result<Box<dyn BlobService>, crate::Error> {
+pub async fn from_addr(
+    uri: &str,
+) -> Result<Arc<dyn BlobService>, Box<dyn std::error::Error + Send + Sync>> {
     let url = Url::parse(uri)
         .map_err(|e| crate::Error::StorageError(format!("unable to parse url: {}", e)))?;
 
-    let blob_service: Box<dyn BlobService> = match url.scheme() {
-        "memory" => {
-            // memory doesn't support host or path in the URL.
-            if url.has_host() || !url.path().is_empty() {
-                return Err(Error::StorageError("invalid url".to_string()));
-            }
-            Box::<MemoryBlobService>::default()
-        }
-        scheme if scheme.starts_with("grpc+") => {
-            // schemes starting with grpc+ go to the GRPCPathInfoService.
-            //   That's normally grpc+unix for unix sockets, and grpc+http(s) for the HTTP counterparts.
-            // - In the case of unix sockets, there must be a path, but may not be a host.
-            // - In the case of non-unix sockets, there must be a host, but no path.
-            // Constructing the channel is handled by tvix_castore::channel::from_url.
-            let client = BlobServiceClient::new(crate::tonic::channel_from_url(&url).await?);
-            Box::new(GRPCBlobService::from_client(client))
-        }
-        scheme if scheme.starts_with("objectstore+") => {
-            // We need to convert the URL to string, strip the prefix there, and then
-            // parse it back as url, as Url::set_scheme() rejects some of the transitions we want to do.
-            let trimmed_url = {
-                let s = url.to_string();
-                Url::parse(s.strip_prefix("objectstore+").unwrap()).unwrap()
-            };
-            Box::new(
-                ObjectStoreBlobService::parse_url(&trimmed_url)
-                    .map_err(|e| Error::StorageError(e.to_string()))?,
-            )
-        }
-        scheme => {
-            return Err(crate::Error::StorageError(format!(
-                "unknown scheme: {}",
-                scheme
-            )))
-        }
-    };
+    let blob_service_config = with_registry(&REG, || {
+        <DeserializeWithRegistry<Box<dyn ServiceBuilder<Output = dyn BlobService>>>>::try_from(url)
+    })?
+    .0;
+    let blob_service = blob_service_config
+        .build("anonymous", &CompositionContext::blank())
+        .await?;
 
     Ok(blob_service)
 }
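For callers, only the return type changes (an `Arc` instead of a `Box`); the URI syntax itself stays as before. A hedged usage sketch inside an async context:

```rust
// Hedged sketch: memory:// is now routed through MemoryBlobServiceConfig via
// the registry rather than being special-cased in from_addr itself.
let blob_service = tvix_castore::blobservice::from_addr("memory://")
    .await
    .expect("valid blobservice address");
```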
diff --git a/tvix/castore/src/blobservice/grpc.rs b/tvix/castore/src/blobservice/grpc.rs
index 5663cd3838ec..0db3dfea4ad8 100644
--- a/tvix/castore/src/blobservice/grpc.rs
+++ b/tvix/castore/src/blobservice/grpc.rs
@@ -1,4 +1,5 @@
 use super::{BlobReader, BlobService, BlobWriter, ChunkedReader};
+use crate::composition::{CompositionContext, ServiceBuilder};
 use crate::{
     proto::{self, stat_blob_response::ChunkMeta},
     B3Digest,
@@ -17,40 +18,43 @@ use tokio_util::{
     io::{CopyToBytes, SinkWriter},
     sync::PollSender,
 };
-use tonic::{async_trait, transport::Channel, Code, Status};
-use tracing::instrument;
+use tonic::{async_trait, Code, Status};
+use tracing::{instrument, Instrument as _};
 
 /// Connects to a (remote) tvix-store BlobService over gRPC.
 #[derive(Clone)]
-pub struct GRPCBlobService {
+pub struct GRPCBlobService<T> {
     /// The internal reference to a gRPC client.
     /// Cloning it is cheap, and it internally handles concurrent requests.
-    grpc_client: proto::blob_service_client::BlobServiceClient<Channel>,
+    grpc_client: proto::blob_service_client::BlobServiceClient<T>,
 }
 
-impl GRPCBlobService {
+impl<T> GRPCBlobService<T> {
     /// construct a [GRPCBlobService] from a [proto::blob_service_client::BlobServiceClient].
-    /// panics if called outside the context of a tokio runtime.
-    pub fn from_client(
-        grpc_client: proto::blob_service_client::BlobServiceClient<Channel>,
-    ) -> Self {
+    pub fn from_client(grpc_client: proto::blob_service_client::BlobServiceClient<T>) -> Self {
         Self { grpc_client }
     }
 }
 
 #[async_trait]
-impl BlobService for GRPCBlobService {
+impl<T> BlobService for GRPCBlobService<T>
+where
+    T: tonic::client::GrpcService<tonic::body::BoxBody> + Send + Sync + Clone + 'static,
+    T::ResponseBody: tonic::codegen::Body<Data = tonic::codegen::Bytes> + Send + 'static,
+    <T::ResponseBody as tonic::codegen::Body>::Error: Into<tonic::codegen::StdError> + Send,
+    T::Future: Send,
+{
     #[instrument(skip(self, digest), fields(blob.digest=%digest))]
     async fn has(&self, digest: &B3Digest) -> io::Result<bool> {
-        let mut grpc_client = self.grpc_client.clone();
-        let resp = grpc_client
+        match self
+            .grpc_client
+            .clone()
             .stat(proto::StatBlobRequest {
                 digest: digest.clone().into(),
                 ..Default::default()
             })
-            .await;
-
-        match resp {
+            .await
+        {
             Ok(_blob_meta) => Ok(true),
             Err(e) if e.code() == Code::NotFound => Ok(false),
             Err(e) => Err(io::Error::new(io::ErrorKind::Other, e)),
@@ -133,6 +137,8 @@ impl BlobService for GRPCBlobService {
         let task = tokio::spawn({
             let mut grpc_client = self.grpc_client.clone();
             async move { Ok::<_, Status>(grpc_client.put(blobchunk_stream).await?.into_inner()) }
+                // instrument the task with the current span, this is not done by default
+                .in_current_span()
         });
 
         // The tx part of the channel is converted to a sink of byte chunks.
@@ -175,6 +181,40 @@ impl BlobService for GRPCBlobService {
     }
 }
 
+#[derive(serde::Deserialize, Debug)]
+#[serde(deny_unknown_fields)]
+pub struct GRPCBlobServiceConfig {
+    url: String,
+}
+
+impl TryFrom<url::Url> for GRPCBlobServiceConfig {
+    type Error = Box<dyn std::error::Error + Send + Sync>;
+    fn try_from(url: url::Url) -> Result<Self, Self::Error> {
+        // Schemes are normally grpc+unix for unix sockets, and grpc+http(s) for the HTTP counterparts.
+        // - In the case of unix sockets, there must be a path, but may not be a host.
+        // - In the case of non-unix sockets, there must be a host, but no path.
+        // Constructing the channel is handled by tvix_castore::tonic::channel_from_url.
+        Ok(GRPCBlobServiceConfig {
+            url: url.to_string(),
+        })
+    }
+}
+
+#[async_trait]
+impl ServiceBuilder for GRPCBlobServiceConfig {
+    type Output = dyn BlobService;
+    async fn build<'a>(
+        &'a self,
+        _instance_name: &str,
+        _context: &CompositionContext,
+    ) -> Result<Arc<dyn BlobService>, Box<dyn std::error::Error + Send + Sync + 'static>> {
+        let client = proto::blob_service_client::BlobServiceClient::new(
+            crate::tonic::channel_from_url(&self.url.parse()?).await?,
+        );
+        Ok(Arc::new(GRPCBlobService::from_client(client)))
+    }
+}
+
 pub struct GRPCBlobWriter<W: tokio::io::AsyncWrite> {
     /// The task containing the put request, and the inner writer, if we're still writing.
     task_and_writer: Option<(JoinHandle<Result<proto::PutBlobResponse, Status>>, W)>,
@@ -335,7 +375,6 @@ mod tests {
                     .await
                     .expect("must succeed"),
             );
-
             GRPCBlobService::from_client(client)
         };
 
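With GRPCBlobService now generic over the channel type, from_addr remains the common entrypoint: grpc+ schemes are parsed into a GRPCBlobServiceConfig, which then builds the channel. A hedged sketch with a hypothetical endpoint:

```rust
// Hedged sketch (hypothetical endpoint): grpc+http(s) and grpc+unix URLs go
// through GRPCBlobServiceConfig and tvix_castore::tonic::channel_from_url.
let svc = tvix_castore::blobservice::from_addr("grpc+http://[::1]:8000")
    .await
    .expect("valid gRPC blobservice address");
```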
diff --git a/tvix/castore/src/blobservice/memory.rs b/tvix/castore/src/blobservice/memory.rs
index 873d06b461de..3d733f950470 100644
--- a/tvix/castore/src/blobservice/memory.rs
+++ b/tvix/castore/src/blobservice/memory.rs
@@ -6,7 +6,8 @@ use tonic::async_trait;
 use tracing::instrument;
 
 use super::{BlobReader, BlobService, BlobWriter};
-use crate::B3Digest;
+use crate::composition::{CompositionContext, ServiceBuilder};
+use crate::{B3Digest, Error};
 
 #[derive(Clone, Default)]
 pub struct MemoryBlobService {
@@ -37,6 +38,33 @@ impl BlobService for MemoryBlobService {
     }
 }
 
+#[derive(serde::Deserialize, Debug)]
+#[serde(deny_unknown_fields)]
+pub struct MemoryBlobServiceConfig {}
+
+impl TryFrom<url::Url> for MemoryBlobServiceConfig {
+    type Error = Box<dyn std::error::Error + Send + Sync>;
+    fn try_from(url: url::Url) -> Result<Self, Self::Error> {
+        // memory doesn't support host or path in the URL.
+        if url.has_host() || !url.path().is_empty() {
+            return Err(Error::StorageError("invalid url".to_string()).into());
+        }
+        Ok(MemoryBlobServiceConfig {})
+    }
+}
+
+#[async_trait]
+impl ServiceBuilder for MemoryBlobServiceConfig {
+    type Output = dyn BlobService;
+    async fn build<'a>(
+        &'a self,
+        _instance_name: &str,
+        _context: &CompositionContext,
+    ) -> Result<Arc<dyn BlobService>, Box<dyn std::error::Error + Send + Sync + 'static>> {
+        Ok(Arc::new(MemoryBlobService::default()))
+    }
+}
+
 pub struct MemoryBlobWriter {
     db: Arc<RwLock<HashMap<B3Digest, Vec<u8>>>>,
 
diff --git a/tvix/castore/src/blobservice/mod.rs b/tvix/castore/src/blobservice/mod.rs
index 50acd40bf769..85292722fa7e 100644
--- a/tvix/castore/src/blobservice/mod.rs
+++ b/tvix/castore/src/blobservice/mod.rs
@@ -1,6 +1,8 @@
 use std::io;
+
 use tonic::async_trait;
 
+use crate::composition::{Registry, ServiceBuilder};
 use crate::proto::stat_blob_response::ChunkMeta;
 use crate::B3Digest;
 
@@ -9,18 +11,17 @@ mod combinator;
 mod from_addr;
 mod grpc;
 mod memory;
-mod naive_seeker;
 mod object_store;
 
 #[cfg(test)]
 pub mod tests;
 
 pub use self::chunked_reader::ChunkedReader;
-pub use self::combinator::CombinedBlobService;
+pub use self::combinator::{CombinedBlobService, CombinedBlobServiceConfig};
 pub use self::from_addr::from_addr;
-pub use self::grpc::GRPCBlobService;
-pub use self::memory::MemoryBlobService;
-pub use self::object_store::ObjectStoreBlobService;
+pub use self::grpc::{GRPCBlobService, GRPCBlobServiceConfig};
+pub use self::memory::{MemoryBlobService, MemoryBlobServiceConfig};
+pub use self::object_store::{ObjectStoreBlobService, ObjectStoreBlobServiceConfig};
 
 /// The base trait all BlobService services need to implement.
 /// It provides functions to check whether a given blob exists,
@@ -101,3 +102,11 @@ impl BlobReader for io::Cursor<&'static [u8; 0]> {}
 impl BlobReader for io::Cursor<Vec<u8>> {}
 impl BlobReader for io::Cursor<bytes::Bytes> {}
 impl BlobReader for tokio::fs::File {}
+
+/// Registers the builtin BlobService implementations with the registry
+pub(crate) fn register_blob_services(reg: &mut Registry) {
+    reg.register::<Box<dyn ServiceBuilder<Output = dyn BlobService>>, super::blobservice::ObjectStoreBlobServiceConfig>("objectstore");
+    reg.register::<Box<dyn ServiceBuilder<Output = dyn BlobService>>, super::blobservice::MemoryBlobServiceConfig>("memory");
+    reg.register::<Box<dyn ServiceBuilder<Output = dyn BlobService>>, super::blobservice::CombinedBlobServiceConfig>("combined");
+    reg.register::<Box<dyn ServiceBuilder<Output = dyn BlobService>>, super::blobservice::GRPCBlobServiceConfig>("grpc");
+}
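Out-of-tree implementations can hook into the same registry: given a config type implementing `ServiceBuilder<Output = dyn BlobService>` (like the `MyBlobServiceConfig` from the composition module docs further down), registration mirrors the builtin calls above:

```rust
// Hedged sketch: MyBlobServiceConfig is a hypothetical user-defined config
// type, as in the composition module documentation.
reg.register::<Box<dyn ServiceBuilder<Output = dyn BlobService>>, MyBlobServiceConfig>(
    "myblobservice",
);
```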
diff --git a/tvix/castore/src/blobservice/naive_seeker.rs b/tvix/castore/src/blobservice/naive_seeker.rs
deleted file mode 100644
index f5a530715093..000000000000
--- a/tvix/castore/src/blobservice/naive_seeker.rs
+++ /dev/null
@@ -1,265 +0,0 @@
-use super::BlobReader;
-use futures::ready;
-use pin_project_lite::pin_project;
-use std::io;
-use std::task::Poll;
-use tokio::io::AsyncRead;
-use tracing::{debug, instrument, trace, warn};
-
-pin_project! {
-    /// This implements [tokio::io::AsyncSeek] for and [tokio::io::AsyncRead] by
-    /// simply skipping over some bytes, keeping track of the position.
-    /// It fails whenever you try to seek backwards.
-    ///
-    /// ## Pinning concerns:
-    ///
-    /// [NaiveSeeker] is itself pinned by callers, and we do not need to concern
-    /// ourselves regarding that.
-    ///
-    /// Though, its fields as per
-    /// <https://doc.rust-lang.org/std/pin/#pinning-is-not-structural-for-field>
-    /// can be pinned or unpinned.
-    ///
-    /// So we need to go over each field and choose our policy carefully.
-    ///
-    /// The obvious cases are the bookkeeping integers we keep in the structure,
-    /// those are private and not shared to anyone, we never build a
-    /// `Pin<&mut X>` out of them at any point, therefore, we can safely never
-    /// mark them as pinned. Of course, it is expected that no developer here
-    /// attempt to `pin!(self.pos)` to pin them because it makes no sense. If
-    /// they have to become pinned, they should be marked `#[pin]` and we need
-    /// to discuss it.
-    ///
-    /// So the bookkeeping integers are in the right state with respect to their
-    /// pinning status. The projection should offer direct access.
-    ///
-    /// On the `r` field, i.e. a `BufReader<R>`, given that
-    /// <https://docs.rs/tokio/latest/tokio/io/struct.BufReader.html#impl-Unpin-for-BufReader%3CR%3E>
-    /// is available, even a `Pin<&mut BufReader<R>>` can be safely moved.
-    ///
-    /// The only care we should have regards the internal reader itself, i.e.
-    /// the `R` instance, see that Tokio decided to `#[pin]` it too:
-    /// <https://docs.rs/tokio/latest/src/tokio/io/util/buf_reader.rs.html#29>
-    ///
-    /// In general, there's no `Unpin` instance for `R: tokio::io::AsyncRead`
-    /// (see <https://docs.rs/tokio/latest/tokio/io/trait.AsyncRead.html>).
-    ///
-    /// Therefore, we could keep it unpinned and pin it in every call site
-    /// whenever we need to call `poll_*` which can be confusing to the non-
-    /// expert developer and we have a fair share amount of situations where the
-    /// [BufReader] instance is naked, i.e. in its `&mut BufReader<R>`
-    /// form, this is annoying because it could lead to expose the naked `R`
-    /// internal instance somehow and would produce a risk of making it move
-    /// unexpectedly.
-    ///
-    /// We choose the path of the least resistance as we have no reason to have
-    /// access to the raw `BufReader<R>` instance, we just `#[pin]` it too and
-    /// enjoy its `poll_*` safe APIs and push the unpinning concerns to the
-    /// internal implementations themselves, which studied the question longer
-    /// than us.
-    pub struct NaiveSeeker<R: tokio::io::AsyncRead> {
-        #[pin]
-        r: tokio::io::BufReader<R>,
-        pos: u64,
-        bytes_to_skip: u64,
-    }
-}
-
-/// The buffer size used to discard data.
-const DISCARD_BUF_SIZE: usize = 4096;
-
-impl<R: tokio::io::AsyncRead> NaiveSeeker<R> {
-    pub fn new(r: R) -> Self {
-        NaiveSeeker {
-            r: tokio::io::BufReader::new(r),
-            pos: 0,
-            bytes_to_skip: 0,
-        }
-    }
-}
-
-impl<R: tokio::io::AsyncRead> tokio::io::AsyncRead for NaiveSeeker<R> {
-    #[instrument(level = "trace", skip_all)]
-    fn poll_read(
-        self: std::pin::Pin<&mut Self>,
-        cx: &mut std::task::Context<'_>,
-        buf: &mut tokio::io::ReadBuf<'_>,
-    ) -> Poll<std::io::Result<()>> {
-        // The amount of data read can be determined by the increase
-        // in the length of the slice returned by `ReadBuf::filled`.
-        let filled_before = buf.filled().len();
-
-        let this = self.project();
-        ready!(this.r.poll_read(cx, buf))?;
-
-        let bytes_read = buf.filled().len() - filled_before;
-        *this.pos += bytes_read as u64;
-
-        trace!(bytes_read = bytes_read, new_pos = this.pos, "poll_read");
-
-        Ok(()).into()
-    }
-}
-
-impl<R: tokio::io::AsyncRead> tokio::io::AsyncBufRead for NaiveSeeker<R> {
-    fn poll_fill_buf(
-        self: std::pin::Pin<&mut Self>,
-        cx: &mut std::task::Context<'_>,
-    ) -> Poll<io::Result<&[u8]>> {
-        self.project().r.poll_fill_buf(cx)
-    }
-
-    #[instrument(level = "trace", skip(self))]
-    fn consume(self: std::pin::Pin<&mut Self>, amt: usize) {
-        let this = self.project();
-        this.r.consume(amt);
-        *this.pos += amt as u64;
-
-        trace!(new_pos = this.pos, "consume");
-    }
-}
-
-impl<R: tokio::io::AsyncRead> tokio::io::AsyncSeek for NaiveSeeker<R> {
-    #[instrument(level="trace", skip(self), fields(inner_pos=%self.pos), err(Debug))]
-    fn start_seek(
-        self: std::pin::Pin<&mut Self>,
-        position: std::io::SeekFrom,
-    ) -> std::io::Result<()> {
-        let absolute_offset: u64 = match position {
-            io::SeekFrom::Start(start_offset) => {
-                if start_offset < self.pos {
-                    return Err(io::Error::new(
-                        io::ErrorKind::Unsupported,
-                        format!("can't seek backwards ({} -> {})", self.pos, start_offset),
-                    ));
-                } else {
-                    start_offset
-                }
-            }
-            // we don't know the total size, can't support this.
-            io::SeekFrom::End(_end_offset) => {
-                return Err(io::Error::new(
-                    io::ErrorKind::Unsupported,
-                    "can't seek from end",
-                ));
-            }
-            io::SeekFrom::Current(relative_offset) => {
-                if relative_offset < 0 {
-                    return Err(io::Error::new(
-                        io::ErrorKind::Unsupported,
-                        "can't seek backwards relative to current position",
-                    ));
-                } else {
-                    self.pos + relative_offset as u64
-                }
-            }
-        };
-
-        // we already know absolute_offset is >= self.pos
-        debug_assert!(
-            absolute_offset >= self.pos,
-            "absolute_offset {} must be >= self.pos {}",
-            absolute_offset,
-            self.pos
-        );
-
-        // calculate bytes to skip
-        let this = self.project();
-        *this.bytes_to_skip = absolute_offset - *this.pos;
-
-        debug!(bytes_to_skip = *this.bytes_to_skip, "seek");
-
-        Ok(())
-    }
-
-    #[instrument(skip_all)]
-    fn poll_complete(
-        mut self: std::pin::Pin<&mut Self>,
-        cx: &mut std::task::Context<'_>,
-    ) -> Poll<std::io::Result<u64>> {
-        if self.bytes_to_skip == 0 {
-            // return the new position (from the start of the stream)
-            return Poll::Ready(Ok(self.pos));
-        }
-
-        // discard some bytes, until pos is where we want it to be.
-        // We create a buffer that we'll discard later on.
-        let mut discard_buf = [0; DISCARD_BUF_SIZE];
-
-        // Loop until we've reached the desired seek position. This is done by issuing repeated
-        // `poll_read` calls.
-        // If the data is not available yet, we will yield back to the executor
-        // and wait to be polled again.
-        loop {
-            if self.bytes_to_skip == 0 {
-                return Poll::Ready(Ok(self.pos));
-            }
-
-            // calculate the length we want to skip at most, which is either a max
-            // buffer size, or the number of remaining bytes to read, whatever is
-            // smaller.
-            let bytes_to_skip_now = std::cmp::min(self.bytes_to_skip as usize, discard_buf.len());
-            let mut discard_buf = tokio::io::ReadBuf::new(&mut discard_buf[..bytes_to_skip_now]);
-
-            ready!(self.as_mut().poll_read(cx, &mut discard_buf))?;
-            let bytes_skipped = discard_buf.filled().len();
-
-            if bytes_skipped == 0 {
-                return Poll::Ready(Err(io::Error::new(
-                    io::ErrorKind::UnexpectedEof,
-                    "got EOF while trying to skip bytes",
-                )));
-            }
-            // decrement bytes to skip. The poll_read call already updated self.pos.
-            *self.as_mut().project().bytes_to_skip -= bytes_skipped as u64;
-        }
-    }
-}
-
-impl<R: tokio::io::AsyncRead + Send + Unpin + 'static> BlobReader for NaiveSeeker<R> {}
-
-#[cfg(test)]
-mod tests {
-    use super::{NaiveSeeker, DISCARD_BUF_SIZE};
-    use std::io::{Cursor, SeekFrom};
-    use tokio::io::{AsyncReadExt, AsyncSeekExt};
-
-    /// This seek requires multiple `poll_read` as we use a multiples of
-    /// DISCARD_BUF_SIZE when doing the seek.
-    /// This ensures we don't hang indefinitely.
-    #[tokio::test]
-    async fn seek() {
-        let buf = vec![0u8; DISCARD_BUF_SIZE * 4];
-        let reader = Cursor::new(&buf);
-        let mut seeker = NaiveSeeker::new(reader);
-        seeker.seek(SeekFrom::Start(4000)).await.unwrap();
-    }
-
-    #[tokio::test]
-    async fn seek_read() {
-        let mut buf = vec![0u8; DISCARD_BUF_SIZE * 2];
-        buf.extend_from_slice(&[1u8; DISCARD_BUF_SIZE * 2]);
-        buf.extend_from_slice(&[2u8; DISCARD_BUF_SIZE * 2]);
-
-        let reader = Cursor::new(&buf);
-        let mut seeker = NaiveSeeker::new(reader);
-
-        let mut read_buf = vec![0u8; DISCARD_BUF_SIZE];
-        seeker.read_exact(&mut read_buf).await.expect("must read");
-        assert_eq!(read_buf.as_slice(), &[0u8; DISCARD_BUF_SIZE]);
-
-        seeker
-            .seek(SeekFrom::Current(DISCARD_BUF_SIZE as i64))
-            .await
-            .expect("must seek");
-        seeker.read_exact(&mut read_buf).await.expect("must read");
-        assert_eq!(read_buf.as_slice(), &[1u8; DISCARD_BUF_SIZE]);
-
-        seeker
-            .seek(SeekFrom::Start(2 * 2 * DISCARD_BUF_SIZE as u64))
-            .await
-            .expect("must seek");
-        seeker.read_exact(&mut read_buf).await.expect("must read");
-        assert_eq!(read_buf.as_slice(), &[2u8; DISCARD_BUF_SIZE]);
-    }
-}
diff --git a/tvix/castore/src/blobservice/object_store.rs b/tvix/castore/src/blobservice/object_store.rs
index d2d0a288a557..5bb05cf26123 100644
--- a/tvix/castore/src/blobservice/object_store.rs
+++ b/tvix/castore/src/blobservice/object_store.rs
@@ -1,4 +1,5 @@
 use std::{
+    collections::HashMap,
     io::{self, Cursor},
     pin::pin,
     sync::Arc,
@@ -18,22 +19,13 @@ use tracing::{debug, instrument, trace, Level};
 use url::Url;
 
 use crate::{
+    composition::{CompositionContext, ServiceBuilder},
     proto::{stat_blob_response::ChunkMeta, StatBlobResponse},
-    B3Digest, B3HashingReader,
+    B3Digest, B3HashingReader, Error,
 };
 
 use super::{BlobReader, BlobService, BlobWriter, ChunkedReader};
 
-#[derive(Clone)]
-pub struct ObjectStoreBlobService {
-    object_store: Arc<dyn ObjectStore>,
-    base_path: Path,
-
-    /// Average chunk size for FastCDC, in bytes.
-    /// min value is half, max value double of that number.
-    avg_chunk_size: u32,
-}
-
 /// Uses any object storage supported by the [object_store] crate to provide a
 /// tvix-castore [BlobService].
 ///
@@ -70,31 +62,14 @@ pub struct ObjectStoreBlobService {
 /// It also allows signalling any compression of chunks in the content-type.
 /// Migration *should* be possible by simply adding the right content-types to
 /// all keys stored so far, but no promises ;-)
-impl ObjectStoreBlobService {
-    /// Constructs a new [ObjectStoreBlobService] from a [Url] supported by
-    /// [object_store].
-    /// Any path suffix becomes the base path of the object store.
-    /// additional options, the same as in [object_store::parse_url_opts] can
-    /// be passed.
-    pub fn parse_url_opts<I, K, V>(url: &Url, options: I) -> Result<Self, object_store::Error>
-    where
-        I: IntoIterator<Item = (K, V)>,
-        K: AsRef<str>,
-        V: Into<String>,
-    {
-        let (object_store, path) = object_store::parse_url_opts(url, options)?;
-
-        Ok(Self {
-            object_store: Arc::new(object_store),
-            base_path: path,
-            avg_chunk_size: 256 * 1024,
-        })
-    }
+#[derive(Clone)]
+pub struct ObjectStoreBlobService {
+    object_store: Arc<dyn ObjectStore>,
+    base_path: Path,
 
-    /// Like [Self::parse_url_opts], except without the options.
-    pub fn parse_url(url: &Url) -> Result<Self, object_store::Error> {
-        Self::parse_url_opts(url, Vec::<(String, String)>::new())
-    }
+    /// Average chunk size for FastCDC, in bytes.
+    /// The minimum chunk size is half of this value, the maximum is double it.
+    avg_chunk_size: u32,
 }
 
 #[instrument(level=Level::TRACE, skip_all,fields(base_path=%base_path,blob.digest=%digest),ret(Display))]
@@ -269,6 +244,71 @@ impl BlobService for ObjectStoreBlobService {
     }
 }
 
+fn default_avg_chunk_size() -> u32 {
+    256 * 1024
+}
+
+#[derive(serde::Deserialize)]
+#[serde(deny_unknown_fields)]
+pub struct ObjectStoreBlobServiceConfig {
+    object_store_url: String,
+    #[serde(default = "default_avg_chunk_size")]
+    avg_chunk_size: u32,
+    object_store_options: HashMap<String, String>,
+}
+
+impl TryFrom<url::Url> for ObjectStoreBlobServiceConfig {
+    type Error = Box<dyn std::error::Error + Send + Sync>;
+    /// Builds an [ObjectStoreBlobServiceConfig] from a [Url] supported by
+    /// [object_store].
+    /// Any path suffix becomes the base path of the object store.
+    /// Additional options, the same as in [object_store::parse_url_opts],
+    /// can be passed via the URL's query parameters.
+    fn try_from(url: url::Url) -> Result<Self, Self::Error> {
+        // We need to convert the URL to string, strip the prefix there, and then
+        // parse it back as url, as Url::set_scheme() rejects some of the transitions we want to do.
+        let trimmed_url = {
+            let s = url.to_string();
+            let mut url = Url::parse(
+                s.strip_prefix("objectstore+")
+                    .ok_or(Error::StorageError("Missing objectstore uri".into()))?,
+            )?;
+            // trim the query pairs, they might contain credentials or local settings we don't want to send as-is.
+            url.set_query(None);
+            url
+        };
+        Ok(ObjectStoreBlobServiceConfig {
+            object_store_url: trimmed_url.into(),
+            object_store_options: url
+                .query_pairs()
+                .into_iter()
+                .map(|(k, v)| (k.to_string(), v.to_string()))
+                .collect(),
+            avg_chunk_size: default_avg_chunk_size(),
+        })
+    }
+}
+
+#[async_trait]
+impl ServiceBuilder for ObjectStoreBlobServiceConfig {
+    type Output = dyn BlobService;
+    async fn build<'a>(
+        &'a self,
+        _instance_name: &str,
+        _context: &CompositionContext,
+    ) -> Result<Arc<dyn BlobService>, Box<dyn std::error::Error + Send + Sync + 'static>> {
+        let (object_store, path) = object_store::parse_url_opts(
+            &self.object_store_url.parse()?,
+            &self.object_store_options,
+        )?;
+        Ok(Arc::new(ObjectStoreBlobService {
+            object_store: Arc::new(object_store),
+            base_path: path,
+            avg_chunk_size: self.avg_chunk_size,
+        }))
+    }
+}
+
 /// Reads blob contents from a AsyncRead, chunks and uploads them.
 /// On success, returns a [StatBlobResponse] pointing to the individual chunks.
 #[instrument(skip_all, fields(base_path=%base_path, min_chunk_size, avg_chunk_size, max_chunk_size), err)]
@@ -309,6 +349,15 @@ async fn chunk_and_upload<R: AsyncRead + Unpin>(
         .collect::<io::Result<Vec<ChunkMeta>>>()
         .await?;
 
+    let chunks = if chunks.len() < 2 {
+        // The chunker returned only one chunk, which is the entire blob.
+        // According to the protocol, we must return an empty list of chunks
+        // when the blob is not split up further.
+        vec![]
+    } else {
+        chunks
+    };
+
     let stat_blob_response = StatBlobResponse {
         chunks,
         bao: "".into(), // still todo
@@ -512,24 +561,35 @@ where
 
 #[cfg(test)]
 mod test {
-    use super::chunk_and_upload;
+    use super::{chunk_and_upload, default_avg_chunk_size};
     use crate::{
         blobservice::{BlobService, ObjectStoreBlobService},
-        fixtures::{BLOB_A, BLOB_A_DIGEST},
+        fixtures::{BLOB_A, BLOB_A_DIGEST, BLOB_B, BLOB_B_DIGEST},
     };
     use std::{io::Cursor, sync::Arc};
     use url::Url;
 
     /// Tests chunk_and_upload directly, bypassing the BlobWriter at open_write().
+    #[rstest::rstest]
+    #[case::a(&BLOB_A, &BLOB_A_DIGEST)]
+    #[case::b(&BLOB_B, &BLOB_B_DIGEST)]
     #[tokio::test]
-    async fn test_chunk_and_upload() {
-        let blobsvc = Arc::new(
-            ObjectStoreBlobService::parse_url(&Url::parse("memory:///").unwrap()).unwrap(),
-        );
-
-        let blob_digest = chunk_and_upload(
-            &mut Cursor::new(BLOB_A.to_vec()),
-            blobsvc.object_store.clone(),
+    async fn test_chunk_and_upload(
+        #[case] blob: &bytes::Bytes,
+        #[case] blob_digest: &crate::B3Digest,
+    ) {
+        let (object_store, base_path) =
+            object_store::parse_url(&Url::parse("memory:///").unwrap()).unwrap();
+        let object_store: Arc<dyn object_store::ObjectStore> = Arc::from(object_store);
+        let blobsvc = Arc::new(ObjectStoreBlobService {
+            object_store: object_store.clone(),
+            avg_chunk_size: default_avg_chunk_size(),
+            base_path,
+        });
+
+        let inserted_blob_digest = chunk_and_upload(
+            &mut Cursor::new(blob.to_vec()),
+            object_store,
             object_store::path::Path::from("/"),
             1024 / 2,
             1024,
@@ -538,9 +598,20 @@ mod test {
         .await
         .expect("chunk_and_upload succeeds");
 
-        assert_eq!(BLOB_A_DIGEST.clone(), blob_digest);
+        assert_eq!(blob_digest.clone(), inserted_blob_digest);
 
         // Now we should have the blob
-        assert!(blobsvc.has(&BLOB_A_DIGEST).await.unwrap());
+        assert!(blobsvc.has(blob_digest).await.unwrap());
+
+        // Check if it was chunked correctly
+        let chunks = blobsvc.chunks(blob_digest).await.unwrap().unwrap();
+        if blob.len() < 1024 / 2 {
+            // The blob is smaller than the min chunk size, so it should have been inserted as a whole.
+            assert!(chunks.is_empty());
+        } else if blob.len() > 1024 * 2 {
+            // The blob is larger than the max chunk size; make sure it was split up into at
+            // least two chunks.
+            assert!(chunks.len() >= 2);
+        }
     }
 }
diff --git a/tvix/castore/src/blobservice/tests/utils.rs b/tvix/castore/src/blobservice/tests/utils.rs
index 706c4b5e4319..7df4f00d3a09 100644
--- a/tvix/castore/src/blobservice/tests/utils.rs
+++ b/tvix/castore/src/blobservice/tests/utils.rs
@@ -2,6 +2,7 @@ use crate::blobservice::{BlobService, MemoryBlobService};
 use crate::proto::blob_service_client::BlobServiceClient;
 use crate::proto::GRPCBlobServiceWrapper;
 use crate::{blobservice::GRPCBlobService, proto::blob_service_server::BlobServiceServer};
+use hyper_util::rt::TokioIo;
 use tonic::transport::{Endpoint, Server, Uri};
 
 /// Constructs and returns a gRPC BlobService.
@@ -33,7 +34,7 @@ pub async fn make_grpc_blob_service_client() -> Box<dyn BlobService> {
             .unwrap()
             .connect_with_connector(tower::service_fn(move |_: Uri| {
                 let right = maybe_right.take().unwrap();
-                async move { Ok::<_, std::io::Error>(right) }
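+                // tonic 0.12 is built on hyper 1.x, whose IO traits differ from
+                // tokio's; TokioIo adapts the tokio duplex stream accordingly.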
+                async move { Ok::<_, std::io::Error>(TokioIo::new(right)) }
             }))
             .await
             .unwrap(),
diff --git a/tvix/castore/src/composition.rs b/tvix/castore/src/composition.rs
new file mode 100644
index 000000000000..c76daafc523d
--- /dev/null
+++ b/tvix/castore/src/composition.rs
@@ -0,0 +1,541 @@
+//! The composition module allows composing different kinds of services based on a set of service
+//! configurations _at runtime_.
+//!
+//! Store configs are deserialized with serde. The registry provides a stateful mapping from the
+//! `type` tag of an internally tagged enum on the serde side to a Config struct which is
+//! deserialized and then returned as a `Box<dyn ServiceBuilder<Output = dyn BlobService>>`
+//! (and likewise for DirectoryService instead of BlobService, etc.).
+//!
+//! ### Example 1.: Implementing a new BlobService
+//!
+//! You need a Config struct which implements `DeserializeOwned` and
+//! `ServiceBuilder<Output = dyn BlobService>`.
+//! Provide the user with a function they can call with
+//! their registry, in which you register your new type as:
+//!
+//! ```
+//! use std::sync::Arc;
+//!
+//! use tvix_castore::composition::*;
+//! use tvix_castore::blobservice::BlobService;
+//!
+//! #[derive(serde::Deserialize)]
+//! struct MyBlobServiceConfig {
+//! }
+//!
+//! #[tonic::async_trait]
+//! impl ServiceBuilder for MyBlobServiceConfig {
+//!     type Output = dyn BlobService;
+//!     async fn build(&self, _: &str, _: &CompositionContext) -> Result<Arc<Self::Output>, Box<dyn std::error::Error + Send + Sync + 'static>> {
+//!         todo!()
+//!     }
+//! }
+//!
+//! impl TryFrom<url::Url> for MyBlobServiceConfig {
+//!     type Error = Box<dyn std::error::Error + Send + Sync>;
+//!     fn try_from(url: url::Url) -> Result<Self, Self::Error> {
+//!         todo!()
+//!     }
+//! }
+//!
+//! pub fn add_my_service(reg: &mut Registry) {
+//!     reg.register::<Box<dyn ServiceBuilder<Output = dyn BlobService>>, MyBlobServiceConfig>("myblobservicetype");
+//! }
+//! ```
+//!
+//! Now, when a user deserializes a store config with the type tag "myblobservicetype" into a
+//! `Box<dyn ServiceBuilder<Output = dyn BlobService>>`, it will be done via `MyBlobServiceConfig`.
+//!
+//! ### Example 2.: Composing stores to get one store
+//!
+//! ```
+//! use std::sync::Arc;
+//! use tvix_castore::composition::*;
+//! use tvix_castore::blobservice::BlobService;
+//!
+//! # fn main() -> Result<(), Box<dyn std::error::Error>> {
+//! # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async move {
+//! let blob_services_configs_json = serde_json::json!({
+//!   "blobstore1": {
+//!     "type": "memory"
+//!   },
+//!   "blobstore2": {
+//!     "type": "memory"
+//!   },
+//!   "default": {
+//!     "type": "combined",
+//!     "local": "blobstore1",
+//!     "remote": "blobstore2"
+//!   }
+//! });
+//!
+//! let blob_services_configs = with_registry(&REG, || serde_json::from_value(blob_services_configs_json))?;
+//! let mut blob_service_composition = Composition::default();
+//! blob_service_composition.extend_with_configs::<dyn BlobService>(blob_services_configs);
+//! let blob_service: Arc<dyn BlobService> = blob_service_composition.build("default").await?;
+//! # Ok(())
+//! # })
+//! # }
+//! ```
+//!
+//! ### Example 3.: Creating another registry extending the default registry with third-party types
+//!
+//! ```
+//! # pub fn add_my_service(reg: &mut tvix_castore::composition::Registry) {}
+//! let mut my_registry = tvix_castore::composition::Registry::default();
+//! tvix_castore::composition::add_default_services(&mut my_registry);
+//! add_my_service(&mut my_registry);
+//! ```
+//!
+//! Continue as in Example 2, using `my_registry` instead of `REG`.
+
+use erased_serde::deserialize;
+use futures::future::BoxFuture;
+use futures::FutureExt;
+use lazy_static::lazy_static;
+use serde::de::DeserializeOwned;
+use serde_tagged::de::{BoxFnSeed, SeedFactory};
+use serde_tagged::util::TagString;
+use std::any::{Any, TypeId};
+use std::cell::Cell;
+use std::collections::BTreeMap;
+use std::collections::HashMap;
+use std::marker::PhantomData;
+use std::sync::Arc;
+use tonic::async_trait;
+
+/// Resolves tag names to the corresponding Config type.
+// Registry implementation details:
+// This is really ugly. Ideally, we would store this as a generic static field:
+//
+// ```
+// struct Registry<T>(BTreeMap<&'static str, RegistryEntry<T>>);
+// static REG<T>: Registry<T>;
+// ```
+//
+// so that one version of the static is generated for each Type that the registry is accessed for.
+// However, this is not possible, because generics are only a thing in functions, and even there
+// they will not interact with static items:
+// https://doc.rust-lang.org/reference/items/static-items.html#statics--generics
+//
+// So instead, we make this lookup at runtime by putting the TypeId into the key.
+// But now we can no longer store the `BoxFnSeed<T>` because we are lacking the generic parameter
+// T, so instead store it as `Box<dyn Any>` and downcast to `&BoxFnSeed<T>` when performing the
+// lookup.
+// I said it was ugly...
+#[derive(Default)]
+pub struct Registry(BTreeMap<(TypeId, &'static str), Box<dyn Any + Sync>>);
+pub type FromUrlSeed<T> =
+    Box<dyn Fn(url::Url) -> Result<T, Box<dyn std::error::Error + Send + Sync>> + Sync>;
+pub struct RegistryEntry<T> {
+    serde_deserialize_seed: BoxFnSeed<DeserializeWithRegistry<T>>,
+    from_url_seed: FromUrlSeed<DeserializeWithRegistry<T>>,
+}
+
+struct RegistryWithFakeType<'r, T>(&'r Registry, PhantomData<T>);
+
+impl<'r, 'de: 'r, T: 'static> SeedFactory<'de, TagString<'de>> for RegistryWithFakeType<'r, T> {
+    type Value = DeserializeWithRegistry<T>;
+    type Seed = &'r BoxFnSeed<Self::Value>;
+
+    // Required method
+    fn seed<E>(self, tag: TagString<'de>) -> Result<Self::Seed, E>
+    where
+        E: serde::de::Error,
+    {
+        // using find() and not get() because of https://github.com/rust-lang/rust/issues/80389
+        let seed: &Box<dyn Any + Sync> = self
+            .0
+             .0
+            .iter()
+            .find(|(k, _)| *k == &(TypeId::of::<T>(), tag.as_ref()))
+            .ok_or_else(|| serde::de::Error::custom(format!("Unknown type: {}", tag)))?
+            .1;
+
+        let entry: &RegistryEntry<T> = <dyn Any>::downcast_ref(&**seed).unwrap();
+
+        Ok(&entry.serde_deserialize_seed)
+    }
+}
+
+/// Wrapper type which implements Deserialize using the registry
+///
+/// Wrap your type in this in order to deserialize it using a registry, e.g.
+/// `DeserializeWithRegistry<Box<dyn MyTrait>>`, then the types registered for `Box<dyn MyTrait>`
+/// will be used.
+pub struct DeserializeWithRegistry<T>(pub T);
+
+impl Registry {
+    /// Registers a mapping from type tag to a concrete type into the registry.
+    ///
+    /// The type parameters are very important:
+    /// After calling `register::<Box<dyn FooTrait>, FooStruct>("footype")`, when a user
+    /// deserializes an input with the type tag "footype" into a
+    /// `Box<dyn FooTrait>`, it will first call the Deserialize impl of `FooStruct` and
+    /// then convert it into a `Box<dyn FooTrait>` using From::from.
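+    ///
+    /// A sketch of a typical call (mirroring Example 1 in the module docs;
+    /// `MyBlobServiceConfig` is the hypothetical config type from there):
+    ///
+    /// ```ignore
+    /// reg.register::<Box<dyn ServiceBuilder<Output = dyn BlobService>>, MyBlobServiceConfig>("myblobservicetype");
+    /// ```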
+    pub fn register<
+        T: 'static,
+        C: DeserializeOwned
+            + TryFrom<url::Url, Error = Box<dyn std::error::Error + Send + Sync>>
+            + Into<T>,
+    >(
+        &mut self,
+        type_name: &'static str,
+    ) {
+        self.0.insert(
+            (TypeId::of::<T>(), type_name),
+            Box::new(RegistryEntry {
+                serde_deserialize_seed: BoxFnSeed::new(|x| {
+                    deserialize::<C>(x)
+                        .map(Into::into)
+                        .map(DeserializeWithRegistry)
+                }),
+                from_url_seed: Box::new(|url| {
+                    C::try_from(url)
+                        .map(Into::into)
+                        .map(DeserializeWithRegistry)
+                }),
+            }),
+        );
+    }
+}
+
+impl<'de, T: 'static> serde::Deserialize<'de> for DeserializeWithRegistry<T> {
+    fn deserialize<D>(de: D) -> std::result::Result<Self, D::Error>
+    where
+        D: serde::Deserializer<'de>,
+    {
+        serde_tagged::de::internal::deserialize(
+            de,
+            "type",
+            RegistryWithFakeType(ACTIVE_REG.get().unwrap(), PhantomData::<T>),
+        )
+    }
+}
+
+#[derive(Debug, thiserror::Error)]
+enum TryFromUrlError {
+    #[error("Unknown type: {0}")]
+    UnknownTag(String),
+}
+
+impl<T: 'static> TryFrom<url::Url> for DeserializeWithRegistry<T> {
+    type Error = Box<dyn std::error::Error + Send + Sync>;
+    fn try_from(url: url::Url) -> Result<Self, Self::Error> {
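+        // The type tag is the scheme up to the first '+',
+        // e.g. "objectstore" for "objectstore+s3://…".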
+        let tag = url.scheme().split('+').next().unwrap();
+        // same as in the SeedFactory impl: using find() and not get() because of https://github.com/rust-lang/rust/issues/80389
+        let seed = ACTIVE_REG
+            .get()
+            .unwrap()
+            .0
+            .iter()
+            .find(|(k, _)| *k == &(TypeId::of::<T>(), tag))
+            .ok_or_else(|| Box::new(TryFromUrlError::UnknownTag(tag.into())))?
+            .1;
+        let entry: &RegistryEntry<T> = <dyn Any>::downcast_ref(&**seed).unwrap();
+        (entry.from_url_seed)(url)
+    }
+}
+
+thread_local! {
+    /// The active Registry is global state, because there is no convenient and universal way to pass state
+    /// into the functions usually used for deserialization, e.g. `serde_json::from_str`, `toml::from_str`,
+    /// `serde_qs::from_str`.
+    static ACTIVE_REG: Cell<Option<&'static Registry>> = panic!("reg was accessed before initialization");
+}
+
+/// Run the provided closure with a registry context.
+/// Any serde deserialize calls within the closure will use the registry to resolve tag names to
+/// the corresponding Config type.
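+///
+/// A minimal sketch (mirroring Example 2 in the module docs; the tag names
+/// depend on what is registered):
+///
+/// ```ignore
+/// use std::collections::HashMap;
+/// // "memory" is one of the builtin tags registered by add_default_services.
+/// let configs: HashMap<String, DeserializeWithRegistry<Box<dyn ServiceBuilder<Output = dyn BlobService>>>> =
+///     with_registry(&REG, || serde_json::from_str(r#"{"default": {"type": "memory"}}"#))?;
+/// ```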
+pub fn with_registry<R>(reg: &'static Registry, f: impl FnOnce() -> R) -> R {
+    ACTIVE_REG.set(Some(reg));
+    let result = f();
+    ACTIVE_REG.set(None);
+    result
+}
+
+lazy_static! {
+    /// The provided registry of tvix_castore, with all builtin BlobService/DirectoryService implementations.
+    pub static ref REG: Registry = {
+        let mut reg = Default::default();
+        add_default_services(&mut reg);
+        reg
+    };
+}
+
+// ---------- End of generic registry code --------- //
+
+/// Register the builtin services of tvix_castore with the given registry.
+/// This is useful for creating your own registry with the builtin types _and_
+/// extra third party types.
+pub fn add_default_services(reg: &mut Registry) {
+    crate::blobservice::register_blob_services(reg);
+    crate::directoryservice::register_directory_services(reg);
+}
+
+pub struct CompositionContext<'a> {
+    // The stack used to detect recursive instantiations and prevent deadlocks
+    // The TypeId of the trait object is included to distinguish e.g. the
+    // BlobService "default" and the DirectoryService "default".
+    stack: Vec<(TypeId, String)>,
+    composition: Option<&'a Composition>,
+}
+
+impl<'a> CompositionContext<'a> {
+    pub fn blank() -> Self {
+        Self {
+            stack: Default::default(),
+            composition: None,
+        }
+    }
+
+    pub async fn resolve<T: ?Sized + Send + Sync + 'static>(
+        &self,
+        entrypoint: String,
+    ) -> Result<Arc<T>, Box<dyn std::error::Error + Send + Sync + 'static>> {
+        // disallow recursion
+        if self
+            .stack
+            .contains(&(TypeId::of::<T>(), entrypoint.clone()))
+        {
+            return Err(CompositionError::Recursion(
+                self.stack.iter().map(|(_, n)| n.clone()).collect(),
+            )
+            .into());
+        }
+        match self.composition {
+            Some(comp) => Ok(comp.build_internal(self.stack.clone(), entrypoint).await?),
+            None => Err(CompositionError::NotFound(entrypoint).into()),
+        }
+    }
+}
+
+#[async_trait]
+/// This is the trait usually implemented on a per-store-type Config struct and
+/// used to instantiate it.
+pub trait ServiceBuilder: Send + Sync {
+    type Output: ?Sized;
+    async fn build(
+        &self,
+        instance_name: &str,
+        context: &CompositionContext,
+    ) -> Result<Arc<Self::Output>, Box<dyn std::error::Error + Send + Sync + 'static>>;
+}
+
+impl<T: ?Sized, S: ServiceBuilder<Output = T> + 'static> From<S>
+    for Box<dyn ServiceBuilder<Output = T>>
+{
+    fn from(t: S) -> Self {
+        Box::new(t)
+    }
+}
+
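+/// Tracks the lifecycle of a single configured service instance:
+/// `Config` (not yet built, still holds its builder), `InProgress` (a task is
+/// building it; the watch channel notifies waiters) and `Done` (the shared
+/// result is cached).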
+enum InstantiationState<T: ?Sized> {
+    Config(Box<dyn ServiceBuilder<Output = T>>),
+    InProgress(tokio::sync::watch::Receiver<Option<Result<Arc<T>, CompositionError>>>),
+    Done(Result<Arc<T>, CompositionError>),
+}
+
+#[derive(Default)]
+pub struct Composition {
+    stores: std::sync::Mutex<HashMap<(TypeId, String), Box<dyn Any + Send + Sync>>>,
+}
+
+#[derive(thiserror::Error, Clone, Debug)]
+pub enum CompositionError {
+    #[error("store not found: {0}")]
+    NotFound(String),
+    #[error("recursion not allowed {0:?}")]
+    Recursion(Vec<String>),
+    #[error("store construction panicked {0}")]
+    Poisoned(String),
+    #[error("instantiation of service {0} failed: {1}")]
+    Failed(String, Arc<dyn std::error::Error + Send + Sync>),
+}
+
+impl<T: ?Sized + Send + Sync + 'static>
+    Extend<(
+        String,
+        DeserializeWithRegistry<Box<dyn ServiceBuilder<Output = T>>>,
+    )> for Composition
+{
+    fn extend<I>(&mut self, configs: I)
+    where
+        I: IntoIterator<
+            Item = (
+                String,
+                DeserializeWithRegistry<Box<dyn ServiceBuilder<Output = T>>>,
+            ),
+        >,
+    {
+        self.stores
+            .lock()
+            .unwrap()
+            .extend(configs.into_iter().map(|(k, v)| {
+                (
+                    (TypeId::of::<T>(), k),
+                    Box::new(InstantiationState::Config(v.0)) as Box<dyn Any + Send + Sync>,
+                )
+            }))
+    }
+}
+
+impl Composition {
+    pub fn extend_with_configs<T: ?Sized + Send + Sync + 'static>(
+        &mut self,
+        // Keep the concrete `HashMap` type here, since it allows for type
+        // inference of the type that was deserialized earlier.
+        configs: HashMap<String, DeserializeWithRegistry<Box<dyn ServiceBuilder<Output = T>>>>,
+    ) {
+        self.extend(configs);
+    }
+
+    pub async fn build<T: ?Sized + Send + Sync + 'static>(
+        &self,
+        entrypoint: &str,
+    ) -> Result<Arc<T>, CompositionError> {
+        self.build_internal(vec![], entrypoint.to_string()).await
+    }
+
+    fn build_internal<T: ?Sized + Send + Sync + 'static>(
+        &self,
+        stack: Vec<(TypeId, String)>,
+        entrypoint: String,
+    ) -> BoxFuture<'_, Result<Arc<T>, CompositionError>> {
+        let mut stores = self.stores.lock().unwrap();
+        let entry = match stores.get_mut(&(TypeId::of::<T>(), entrypoint.clone())) {
+            Some(v) => v,
+            None => return Box::pin(futures::future::err(CompositionError::NotFound(entrypoint))),
+        };
+        // for lifetime reasons, we put a placeholder value in the hashmap while we figure out what
+        // the new value should be. the Mutex stays locked the entire time, so nobody will ever see
+        // this temporary value.
+        let prev_val = std::mem::replace(
+            entry,
+            Box::new(InstantiationState::<T>::Done(Err(
+                CompositionError::Poisoned(entrypoint.clone()),
+            ))),
+        );
+        let (new_val, ret) = match *prev_val.downcast::<InstantiationState<T>>().unwrap() {
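+            // the store was already built (or its build failed); return the cached result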
+            InstantiationState::Done(service) => (
+                InstantiationState::Done(service.clone()),
+                futures::future::ready(service).boxed(),
+            ),
+            // the construction of the store has not started yet.
+            InstantiationState::Config(config) => {
+                let (tx, rx) = tokio::sync::watch::channel(None);
+                (
+                    InstantiationState::InProgress(rx),
+                    (async move {
+                        let mut new_context = CompositionContext {
+                            stack: stack.clone(),
+                            composition: Some(self),
+                        };
+                        new_context
+                            .stack
+                            .push((TypeId::of::<T>(), entrypoint.clone()));
+                        let res =
+                            config.build(&entrypoint, &new_context).await.map_err(|e| {
+                                match e.downcast() {
+                                    Ok(e) => *e,
+                                    Err(e) => CompositionError::Failed(entrypoint, e.into()),
+                                }
+                            });
+                        tx.send(Some(res.clone())).unwrap();
+                        res
+                    })
+                    .boxed(),
+                )
+            }
+            // there is already a task driving forward the construction of this store, wait for it
+            // to notify us via the provided channel
+            InstantiationState::InProgress(mut recv) => {
+                (InstantiationState::InProgress(recv.clone()), {
+                    (async move {
+                        loop {
+                            if let Some(v) =
+                                recv.borrow_and_update().as_ref().map(|res| res.clone())
+                            {
+                                break v;
+                            }
+                            recv.changed().await.unwrap();
+                        }
+                    })
+                    .boxed()
+                })
+            }
+        };
+        *entry = Box::new(new_val);
+        ret
+    }
+
+    pub fn context(&self) -> CompositionContext {
+        CompositionContext {
+            stack: vec![],
+            composition: Some(self),
+        }
+    }
+}
+
+#[cfg(test)]
+mod test {
+    use super::*;
+    use crate::blobservice::BlobService;
+    use std::sync::Arc;
+
+    /// Test that we return a reference to the same instance of MemoryBlobService (via ptr_eq)
+    /// when instantiating the same entrypoint twice. By instantiating concurrently, we also
+    /// test the channels notifying the second consumer when the store has been instantiated.
+    #[tokio::test]
+    async fn concurrent() {
+        let blob_services_configs_json = serde_json::json!({
+            "default": {
+                "type": "memory",
+            }
+        });
+
+        let blob_services_configs =
+            with_registry(&REG, || serde_json::from_value(blob_services_configs_json)).unwrap();
+        let mut blob_service_composition = Composition::default();
+        blob_service_composition.extend_with_configs::<dyn BlobService>(blob_services_configs);
+        let (blob_service1, blob_service2) = tokio::join!(
+            blob_service_composition.build::<dyn BlobService>("default"),
+            blob_service_composition.build::<dyn BlobService>("default")
+        );
+        assert!(Arc::ptr_eq(
+            &blob_service1.unwrap(),
+            &blob_service2.unwrap()
+        ));
+    }
+
+    /// Test that we throw the correct error when an instantiation would recurse (deadlock)
+    #[tokio::test]
+    async fn reject_recursion() {
+        let blob_services_configs_json = serde_json::json!({
+            "default": {
+                "type": "combined",
+                "local": "other",
+                "remote": "other"
+            },
+            "other": {
+                "type": "combined",
+                "local": "default",
+                "remote": "default"
+            }
+        });
+
+        let blob_services_configs =
+            with_registry(&REG, || serde_json::from_value(blob_services_configs_json)).unwrap();
+        let mut blob_service_composition = Composition::default();
+        blob_service_composition.extend_with_configs::<dyn BlobService>(blob_services_configs);
+        match blob_service_composition
+            .build::<dyn BlobService>("default")
+            .await
+        {
+            Err(CompositionError::Recursion(stack)) => {
+                assert_eq!(stack, vec!["default".to_string(), "other".to_string()])
+            }
+            other => panic!("should have returned an error, returned: {:?}", other.err()),
+        }
+    }
+}
diff --git a/tvix/castore/src/digests.rs b/tvix/castore/src/digests.rs
index 2311c95c4ddc..4d919ff0d873 100644
--- a/tvix/castore/src/digests.rs
+++ b/tvix/castore/src/digests.rs
@@ -6,7 +6,7 @@ use thiserror::Error;
 pub struct B3Digest(Bytes);
 
 // TODO: allow converting these errors to crate::Error
-#[derive(Error, Debug)]
+#[derive(Error, Debug, PartialEq)]
 pub enum Error {
     #[error("invalid digest length: {0}")]
     InvalidDigestLen(usize),
@@ -26,6 +26,11 @@ impl From<B3Digest> for bytes::Bytes {
     }
 }
 
+impl From<blake3::Hash> for B3Digest {
+    fn from(value: blake3::Hash) -> Self {
+        Self(Bytes::copy_from_slice(value.as_bytes()))
+    }
+}
+
 impl From<digest::Output<blake3::Hasher>> for B3Digest {
     fn from(value: digest::Output<blake3::Hasher>) -> Self {
         let v = Into::<[u8; B3_LEN]>::into(value);
@@ -67,6 +72,12 @@ impl From<&[u8; B3_LEN]> for B3Digest {
     }
 }
 
+impl From<B3Digest> for [u8; B3_LEN] {
+    fn from(value: B3Digest) -> Self {
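+        // A B3Digest always wraps exactly B3_LEN bytes, so this conversion cannot fail.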
+        value.0.to_vec().try_into().unwrap()
+    }
+}
+
 impl Clone for B3Digest {
     fn clone(&self) -> Self {
         Self(self.0.to_owned())
diff --git a/tvix/castore/src/directoryservice/bigtable.rs b/tvix/castore/src/directoryservice/bigtable.rs
index 1194c6ddc999..73ab4342d832 100644
--- a/tvix/castore/src/directoryservice/bigtable.rs
+++ b/tvix/castore/src/directoryservice/bigtable.rs
@@ -5,10 +5,14 @@ use futures::stream::BoxStream;
 use prost::Message;
 use serde::{Deserialize, Serialize};
 use serde_with::{serde_as, DurationSeconds};
+use std::sync::Arc;
 use tonic::async_trait;
 use tracing::{instrument, trace, warn};
 
-use super::{utils::traverse_directory, DirectoryPutter, DirectoryService, SimplePutter};
+use super::{
+    utils::traverse_directory, Directory, DirectoryPutter, DirectoryService, SimplePutter,
+};
+use crate::composition::{CompositionContext, ServiceBuilder};
 use crate::{proto, B3Digest, Error};
 
 /// There should not be more than 10 MiB in a single cell.
@@ -43,41 +47,6 @@ pub struct BigtableDirectoryService {
     emulator: std::sync::Arc<(tempfile::TempDir, async_process::Child)>,
 }
 
-/// Represents configuration of [BigtableDirectoryService].
-/// This currently conflates both connect parameters and data model/client
-/// behaviour parameters.
-#[serde_as]
-#[derive(Clone, Debug, PartialEq, Deserialize, Serialize)]
-pub struct BigtableParameters {
-    project_id: String,
-    instance_name: String,
-    #[serde(default)]
-    is_read_only: bool,
-    #[serde(default = "default_channel_size")]
-    channel_size: usize,
-
-    #[serde_as(as = "Option<DurationSeconds<String>>")]
-    #[serde(default = "default_timeout")]
-    timeout: Option<std::time::Duration>,
-    table_name: String,
-    family_name: String,
-
-    #[serde(default = "default_app_profile_id")]
-    app_profile_id: String,
-}
-
-fn default_app_profile_id() -> String {
-    "default".to_owned()
-}
-
-fn default_channel_size() -> usize {
-    4
-}
-
-fn default_timeout() -> Option<std::time::Duration> {
-    Some(std::time::Duration::from_secs(4))
-}
-
 impl BigtableDirectoryService {
     #[cfg(not(test))]
     pub async fn connect(params: BigtableParameters) -> Result<Self, bigtable::Error> {
@@ -182,7 +151,7 @@ fn derive_directory_key(digest: &B3Digest) -> String {
 #[async_trait]
 impl DirectoryService for BigtableDirectoryService {
     #[instrument(skip(self, digest), err, fields(directory.digest = %digest))]
-    async fn get(&self, digest: &B3Digest) -> Result<Option<proto::Directory>, Error> {
+    async fn get(&self, digest: &B3Digest) -> Result<Option<Directory>, Error> {
         let mut client = self.client.clone();
         let directory_key = derive_directory_key(digest);
 
@@ -274,28 +243,20 @@ impl DirectoryService for BigtableDirectoryService {
 
         // Try to parse the value into a Directory message.
         let directory = proto::Directory::decode(Bytes::from(row_cell.value))
-            .map_err(|e| Error::StorageError(format!("unable to decode directory proto: {}", e)))?;
-
-        // validate the Directory.
-        directory
-            .validate()
+            .map_err(|e| Error::StorageError(format!("unable to decode directory proto: {}", e)))?
+            .try_into()
             .map_err(|e| Error::StorageError(format!("invalid Directory message: {}", e)))?;
 
         Ok(Some(directory))
     }
 
     #[instrument(skip(self, directory), err, fields(directory.digest = %directory.digest()))]
-    async fn put(&self, directory: proto::Directory) -> Result<B3Digest, Error> {
+    async fn put(&self, directory: Directory) -> Result<B3Digest, Error> {
         let directory_digest = directory.digest();
         let mut client = self.client.clone();
         let directory_key = derive_directory_key(&directory_digest);
 
-        // Ensure the directory we're trying to upload passes validation
-        directory
-            .validate()
-            .map_err(|e| Error::InvalidRequest(format!("directory is invalid: {}", e)))?;
-
-        let data = directory.encode_to_vec();
+        let data = proto::Directory::from(directory).encode_to_vec();
         if data.len() as u64 > CELL_SIZE_LIMIT {
             return Err(Error::StorageError(
                 "Directory exceeds cell limit on Bigtable".into(),
@@ -343,7 +304,7 @@ impl DirectoryService for BigtableDirectoryService {
     fn get_recursive(
         &self,
         root_directory_digest: &B3Digest,
-    ) -> BoxStream<'static, Result<proto::Directory, Error>> {
+    ) -> BoxStream<'static, Result<Directory, Error>> {
         traverse_directory(self.clone(), root_directory_digest)
     }
 
@@ -355,3 +316,73 @@ impl DirectoryService for BigtableDirectoryService {
         Box::new(SimplePutter::new(self.clone()))
     }
 }
+
+/// Represents configuration of [BigtableDirectoryService].
+/// This currently conflates both connect parameters and data model/client
+/// behaviour parameters.
+#[serde_as]
+#[derive(Clone, Debug, PartialEq, Deserialize, Serialize)]
+#[serde(deny_unknown_fields)]
+pub struct BigtableParameters {
+    project_id: String,
+    instance_name: String,
+    #[serde(default)]
+    is_read_only: bool,
+    #[serde(default = "default_channel_size")]
+    channel_size: usize,
+
+    #[serde_as(as = "Option<DurationSeconds<String>>")]
+    #[serde(default = "default_timeout")]
+    timeout: Option<std::time::Duration>,
+    table_name: String,
+    family_name: String,
+
+    #[serde(default = "default_app_profile_id")]
+    app_profile_id: String,
+}
+
+#[async_trait]
+impl ServiceBuilder for BigtableParameters {
+    type Output = dyn DirectoryService;
+    async fn build<'a>(
+        &'a self,
+        _instance_name: &str,
+        _context: &CompositionContext,
+    ) -> Result<Arc<dyn DirectoryService>, Box<dyn std::error::Error + Send + Sync>> {
+        Ok(Arc::new(
+            BigtableDirectoryService::connect(self.clone()).await?,
+        ))
+    }
+}
+
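+/// A hedged sketch of the URL shape this accepts (scheme elided; the instance
+/// name comes from the host, everything else from the query string):
+///
+/// `…://some-instance?project_id=some-project&table_name=some-table&family_name=some-family`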
+impl TryFrom<url::Url> for BigtableParameters {
+    type Error = Box<dyn std::error::Error + Send + Sync>;
+    fn try_from(mut url: url::Url) -> Result<Self, Self::Error> {
+        // parse the instance name from the hostname.
+        let instance_name = url
+            .host_str()
+            .ok_or_else(|| Error::StorageError("instance name missing".into()))?
+            .to_string();
+
+        // … but add it to the query string now, so we just need to parse that.
+        url.query_pairs_mut()
+            .append_pair("instance_name", &instance_name);
+
+        let params: BigtableParameters = serde_qs::from_str(url.query().unwrap_or_default())
+            .map_err(|e| Error::InvalidRequest(format!("failed to parse parameters: {}", e)))?;
+
+        Ok(params)
+    }
+}
+
+fn default_app_profile_id() -> String {
+    "default".to_owned()
+}
+
+fn default_channel_size() -> usize {
+    4
+}
+
+fn default_timeout() -> Option<std::time::Duration> {
+    Some(std::time::Duration::from_secs(4))
+}
diff --git a/tvix/castore/src/directoryservice/closure_validator.rs b/tvix/castore/src/directoryservice/closure_validator.rs
deleted file mode 100644
index b9746a5a0501..000000000000
--- a/tvix/castore/src/directoryservice/closure_validator.rs
+++ /dev/null
@@ -1,309 +0,0 @@
-use std::collections::{HashMap, HashSet};
-
-use bstr::ByteSlice;
-
-use petgraph::{
-    graph::{DiGraph, NodeIndex},
-    visit::{Bfs, Walker},
-};
-use tracing::instrument;
-
-use crate::{
-    proto::{self, Directory},
-    B3Digest, Error,
-};
-
-type DirectoryGraph = DiGraph<Directory, ()>;
-
-/// This can be used to validate a Directory closure (DAG of connected
-/// Directories), and their insertion order.
-///
-/// Directories need to be inserted (via `add`), in an order from the leaves to
-/// the root (DFS Post-Order).
-/// During insertion, We validate as much as we can at that time:
-///
-///  - individual validation of Directory messages
-///  - validation of insertion order (no upload of not-yet-known Directories)
-///  - validation of size fields of referred Directories
-///
-/// Internally it keeps all received Directories in a directed graph,
-/// with node weights being the Directories and edges pointing to child
-/// directories.
-///
-/// Once all Directories have been inserted, a finalize function can be
-/// called to get a (deduplicated and) validated list of directories, in
-/// insertion order.
-/// During finalize, a check for graph connectivity is performed too, to ensure
-/// there's no disconnected components, and only one root.
-#[derive(Default)]
-pub struct ClosureValidator {
-    // A directed graph, using Directory as node weight, without edge weights.
-    // Edges point from parents to children.
-    graph: DirectoryGraph,
-
-    // A lookup table from directory digest to node index.
-    digest_to_node_ix: HashMap<B3Digest, NodeIndex>,
-
-    /// Keeps track of the last-inserted directory graph node index.
-    /// On a correct insert, this will be the root node, from which the DFS post
-    /// order traversal will start from.
-    last_directory_ix: Option<NodeIndex>,
-}
-
-impl ClosureValidator {
-    /// Insert a new Directory into the closure.
-    /// Perform individual Directory validation, validation of insertion order
-    /// and size fields.
-    #[instrument(level = "trace", skip_all, fields(directory.digest=%directory.digest(), directory.size=%directory.size()), err)]
-    pub fn add(&mut self, directory: proto::Directory) -> Result<(), Error> {
-        let digest = directory.digest();
-
-        // If we already saw this node previously, it's already validated and in the graph.
-        if self.digest_to_node_ix.contains_key(&digest) {
-            return Ok(());
-        }
-
-        // Do some general validation
-        directory
-            .validate()
-            .map_err(|e| Error::InvalidRequest(e.to_string()))?;
-
-        // Ensure the directory only refers to directories which we already accepted.
-        // We lookup their node indices and add them to a HashSet.
-        let mut child_ixs = HashSet::new();
-        for dir in &directory.directories {
-            let child_digest = B3Digest::try_from(dir.digest.to_owned()).unwrap(); // validated
-
-            // Ensure the digest has already been seen
-            let child_ix = *self.digest_to_node_ix.get(&child_digest).ok_or_else(|| {
-                Error::InvalidRequest(format!(
-                    "'{}' refers to unseen child dir: {}",
-                    dir.name.as_bstr(),
-                    &child_digest
-                ))
-            })?;
-
-            // Ensure the size specified in the child node matches the directory size itself.
-            let recorded_child_size = self
-                .graph
-                .node_weight(child_ix)
-                .expect("node not found")
-                .size();
-
-            // Ensure the size specified in the child node matches our records.
-            if dir.size != recorded_child_size {
-                return Err(Error::InvalidRequest(format!(
-                    "'{}' has wrong size, specified {}, recorded {}",
-                    dir.name.as_bstr(),
-                    dir.size,
-                    recorded_child_size
-                )));
-            }
-
-            child_ixs.insert(child_ix);
-        }
-
-        // Insert node into the graph, and add edges to all children.
-        let node_ix = self.graph.add_node(directory);
-        for child_ix in child_ixs {
-            self.graph.add_edge(node_ix, child_ix, ());
-        }
-
-        // Record the mapping from digest to node_ix in our lookup table.
-        self.digest_to_node_ix.insert(digest, node_ix);
-
-        // Update last_directory_ix.
-        self.last_directory_ix = Some(node_ix);
-
-        Ok(())
-    }
-
-    /// Ensure that all inserted Directories are connected, then return a
-    /// (deduplicated) and validated list of directories, in from-leaves-to-root
-    /// order.
-    /// In case no elements have been inserted, returns an empty list.
-    #[instrument(level = "trace", skip_all, err)]
-    pub(crate) fn finalize(self) -> Result<Vec<Directory>, Error> {
-        let (graph, _) = match self.finalize_raw()? {
-            None => return Ok(vec![]),
-            Some(v) => v,
-        };
-        // Dissolve the graph, returning the nodes as a Vec.
-        // As the graph was populated in a valid DFS PostOrder, we can return
-        // nodes in that same order.
-        let (nodes, _edges) = graph.into_nodes_edges();
-        Ok(nodes.into_iter().map(|x| x.weight).collect())
-    }
-
-    /// Ensure that all inserted Directories are connected, then return a
-    /// (deduplicated) and validated list of directories, in from-root-to-leaves
-    /// order.
-    /// In case no elements have been inserted, returns an empty list.
-    #[instrument(level = "trace", skip_all, err)]
-    pub(crate) fn finalize_root_to_leaves(self) -> Result<Vec<Directory>, Error> {
-        let (graph, root) = match self.finalize_raw()? {
-            None => return Ok(vec![]),
-            Some(v) => v,
-        };
-
-        // do a BFS traversal of the graph, starting with the root node to get
-        // all nodes reachable from there.
-        let traversal = Bfs::new(&graph, root);
-
-        let order = traversal.iter(&graph).collect::<Vec<_>>();
-
-        let (nodes, _edges) = graph.into_nodes_edges();
-
-        // Convert to option, so that we can take individual nodes out without messing up the
-        // indices
-        let mut nodes = nodes.into_iter().map(Some).collect::<Vec<_>>();
-
-        Ok(order
-            .iter()
-            .map(|i| nodes[i.index()].take().unwrap().weight)
-            .collect())
-    }
-
-    /// Internal implementation of closure validation
-    #[instrument(level = "trace", skip_all, err)]
-    fn finalize_raw(self) -> Result<Option<(DirectoryGraph, NodeIndex)>, Error> {
-        // If no nodes were inserted, an empty list is returned.
-        let last_directory_ix = if let Some(x) = self.last_directory_ix {
-            x
-        } else {
-            return Ok(None);
-        };
-
-        // do a BFS traversal of the graph, starting with the root node to get
-        // (the count of) all nodes reachable from there.
-        let mut traversal = Bfs::new(&self.graph, last_directory_ix);
-
-        let mut visited_directory_count = 0;
-        #[cfg(debug_assertions)]
-        let mut visited_directory_ixs = HashSet::new();
-        #[cfg_attr(not(debug_assertions), allow(unused))]
-        while let Some(directory_ix) = traversal.next(&self.graph) {
-            #[cfg(debug_assertions)]
-            visited_directory_ixs.insert(directory_ix);
-
-            visited_directory_count += 1;
-        }
-
-        // If the number of nodes collected equals the total number of nodes in
-        // the graph, we know all nodes are connected.
-        if visited_directory_count != self.graph.node_count() {
-            // more or less exhaustive error reporting.
-            #[cfg(debug_assertions)]
-            {
-                let all_directory_ixs: HashSet<_> = self.graph.node_indices().collect();
-
-                let unvisited_directories: HashSet<_> = all_directory_ixs
-                    .difference(&visited_directory_ixs)
-                    .map(|ix| self.graph.node_weight(*ix).expect("node not found"))
-                    .collect();
-
-                return Err(Error::InvalidRequest(format!(
-                    "found {} disconnected directories: {:?}",
-                    self.graph.node_count() - visited_directory_ixs.len(),
-                    unvisited_directories
-                )));
-            }
-            #[cfg(not(debug_assertions))]
-            {
-                return Err(Error::InvalidRequest(format!(
-                    "found {} disconnected directories",
-                    self.graph.node_count() - visited_directory_count
-                )));
-            }
-        }
-
-        Ok(Some((self.graph, last_directory_ix)))
-    }
-}
-
-#[cfg(test)]
-mod tests {
-    use crate::{
-        fixtures::{DIRECTORY_A, DIRECTORY_B, DIRECTORY_C},
-        proto::{self, Directory},
-    };
-    use lazy_static::lazy_static;
-    use rstest::rstest;
-
-    lazy_static! {
-        pub static ref BROKEN_DIRECTORY : Directory = Directory {
-            symlinks: vec![proto::SymlinkNode {
-                name: "".into(), // invalid name!
-                target: "doesntmatter".into(),
-            }],
-            ..Default::default()
-        };
-
-        pub static ref BROKEN_PARENT_DIRECTORY: Directory = Directory {
-            directories: vec![proto::DirectoryNode {
-                name: "foo".into(),
-                digest: DIRECTORY_A.digest().into(),
-                size: DIRECTORY_A.size() + 42, // wrong!
-            }],
-            ..Default::default()
-        };
-    }
-
-    use super::ClosureValidator;
-
-    #[rstest]
-    /// Uploading an empty directory should succeed.
-    #[case::empty_directory(&[&*DIRECTORY_A], false, Some(vec![&*DIRECTORY_A]))]
-    /// Uploading A, then B (referring to A) should succeed.
-    #[case::simple_closure(&[&*DIRECTORY_A, &*DIRECTORY_B], false, Some(vec![&*DIRECTORY_A, &*DIRECTORY_B]))]
-    /// Uploading A, then A, then C (referring to A twice) should succeed.
-    /// We pretend to be a dumb client not deduping directories.
-    #[case::same_child(&[&*DIRECTORY_A, &*DIRECTORY_A, &*DIRECTORY_C], false, Some(vec![&*DIRECTORY_A, &*DIRECTORY_C]))]
-    /// Uploading A, then C (referring to A twice) should succeed.
-    #[case::same_child_dedup(&[&*DIRECTORY_A, &*DIRECTORY_C], false, Some(vec![&*DIRECTORY_A, &*DIRECTORY_C]))]
-    /// Uploading A, then C (referring to A twice), then B (itself referring to A) should fail during close,
-    /// as B itself would be left unconnected.
-    #[case::unconnected_node(&[&*DIRECTORY_A, &*DIRECTORY_C, &*DIRECTORY_B], false, None)]
-    /// Uploading B (referring to A) should fail immediately, because A was never uploaded.
-    #[case::dangling_pointer(&[&*DIRECTORY_B], true, None)]
-    /// Uploading a directory failing validation should fail immediately.
-    #[case::failing_validation(&[&*BROKEN_DIRECTORY], true, None)]
-    /// Uploading a directory which refers to another Directory with a wrong size should fail.
-    #[case::wrong_size_in_parent(&[&*DIRECTORY_A, &*BROKEN_PARENT_DIRECTORY], true, None)]
-    fn test_uploads(
-        #[case] directories_to_upload: &[&Directory],
-        #[case] exp_fail_upload_last: bool,
-        #[case] exp_finalize: Option<Vec<&Directory>>, // Some(_) if finalize successful, None if not.
-    ) {
-        let mut dcv = ClosureValidator::default();
-        let len_directories_to_upload = directories_to_upload.len();
-
-        for (i, d) in directories_to_upload.iter().enumerate() {
-            let resp = dcv.add((*d).clone());
-            if i == len_directories_to_upload - 1 && exp_fail_upload_last {
-                assert!(resp.is_err(), "expect last put to fail");
-
-                // We don't really care anymore what finalize() would return, as
-                // the add() failed.
-                return;
-            } else {
-                assert!(resp.is_ok(), "expect put to succeed");
-            }
-        }
-
-        // everything was uploaded successfully. Test finalize().
-        let resp = dcv.finalize();
-
-        match exp_finalize {
-            Some(directories) => {
-                assert_eq!(
-                    Vec::from_iter(directories.iter().map(|e| (*e).to_owned())),
-                    resp.expect("drain should succeed")
-                );
-            }
-            None => {
-                resp.expect_err("drain should fail");
-            }
-        }
-    }
-}
diff --git a/tvix/castore/src/directoryservice/combinators.rs b/tvix/castore/src/directoryservice/combinators.rs
new file mode 100644
index 000000000000..4283142231f9
--- /dev/null
+++ b/tvix/castore/src/directoryservice/combinators.rs
@@ -0,0 +1,180 @@
+use std::sync::Arc;
+
+use futures::stream::BoxStream;
+use futures::StreamExt;
+use futures::TryFutureExt;
+use futures::TryStreamExt;
+use tonic::async_trait;
+use tracing::{instrument, trace};
+
+use super::{Directory, DirectoryGraph, DirectoryService, RootToLeavesValidator, SimplePutter};
+use crate::composition::{CompositionContext, ServiceBuilder};
+use crate::directoryservice::DirectoryPutter;
+use crate::B3Digest;
+use crate::Error;
+
+/// Asks near first; if not found, asks far.
+/// If found there, returns it and *inserts* it into
+/// near.
+/// Specifically, it always obtains the entire directory closure from far and inserts it into near,
+/// which is useful when far does not support accessing intermediate directories (but near does).
+/// There is no negative cache.
+/// Inserts and listings are not implemented for now.
+#[derive(Clone)]
+pub struct Cache<DS1, DS2> {
+    near: DS1,
+    far: DS2,
+}
+
+impl<DS1, DS2> Cache<DS1, DS2> {
+    pub fn new(near: DS1, far: DS2) -> Self {
+        Self { near, far }
+    }
+}
+
+#[async_trait]
+impl<DS1, DS2> DirectoryService for Cache<DS1, DS2>
+where
+    DS1: DirectoryService + Clone + 'static,
+    DS2: DirectoryService + Clone + 'static,
+{
+    #[instrument(skip(self, digest), fields(directory.digest = %digest))]
+    async fn get(&self, digest: &B3Digest) -> Result<Option<Directory>, Error> {
+        match self.near.get(digest).await? {
+            Some(directory) => {
+                trace!("serving from cache");
+                Ok(Some(directory))
+            }
+            None => {
+                trace!("not found in near, asking remoteโ€ฆ");
+
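+                // Pull the entire closure from far (which streams root first),
+                // then replay it into near in leaves-to-root order.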
+                let mut copy = DirectoryGraph::with_order(
+                    RootToLeavesValidator::new_with_root_digest(digest.clone()),
+                );
+
+                let mut stream = self.far.get_recursive(digest);
+                let root = stream.try_next().await?;
+
+                if let Some(root) = root.clone() {
+                    copy.add(root)
+                        .map_err(|e| Error::StorageError(e.to_string()))?;
+                }
+
+                while let Some(dir) = stream.try_next().await? {
+                    copy.add(dir)
+                        .map_err(|e| Error::StorageError(e.to_string()))?;
+                }
+
+                let copy = copy
+                    .validate()
+                    .map_err(|e| Error::StorageError(e.to_string()))?;
+
+                let mut put = self.near.put_multiple_start();
+                for dir in copy.drain_leaves_to_root() {
+                    put.put(dir).await?;
+                }
+                put.close().await?;
+
+                Ok(root)
+            }
+        }
+    }
+
+    #[instrument(skip_all)]
+    async fn put(&self, _directory: Directory) -> Result<B3Digest, Error> {
+        Err(Error::StorageError("unimplemented".to_string()))
+    }
+
+    #[instrument(skip_all, fields(directory.digest = %root_directory_digest))]
+    fn get_recursive(
+        &self,
+        root_directory_digest: &B3Digest,
+    ) -> BoxStream<'static, Result<Directory, Error>> {
+        let near = self.near.clone();
+        let far = self.far.clone();
+        let digest = root_directory_digest.clone();
+        Box::pin(
+            (async move {
+                let mut stream = near.get_recursive(&digest);
+                match stream.try_next().await? {
+                    Some(first) => {
+                        trace!("serving from cache");
+                        Ok(futures::stream::once(async { Ok(first) })
+                            .chain(stream)
+                            .left_stream())
+                    }
+                    None => {
+                        trace!("not found in near, asking remoteโ€ฆ");
+
+                        let mut copy_for_near = DirectoryGraph::with_order(
+                            RootToLeavesValidator::new_with_root_digest(digest.clone()),
+                        );
+                        let mut copy_for_client = vec![];
+
+                        let mut stream = far.get_recursive(&digest);
+                        while let Some(dir) = stream.try_next().await? {
+                            copy_for_near
+                                .add(dir.clone())
+                                .map_err(|e| Error::StorageError(e.to_string()))?;
+                            copy_for_client.push(dir);
+                        }
+
+                        let copy_for_near = copy_for_near
+                            .validate()
+                            .map_err(|e| Error::StorageError(e.to_string()))?;
+                        let mut put = near.put_multiple_start();
+                        for dir in copy_for_near.drain_leaves_to_root() {
+                            put.put(dir).await?;
+                        }
+                        put.close().await?;
+
+                        Ok(futures::stream::iter(copy_for_client.into_iter().map(Ok))
+                            .right_stream())
+                    }
+                }
+            })
+            .try_flatten_stream(),
+        )
+    }
+
+    #[instrument(skip_all)]
+    fn put_multiple_start(&self) -> Box<(dyn DirectoryPutter + 'static)> {
+        Box::new(SimplePutter::new((*self).clone()))
+    }
+}
+
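+/// Deserializable configuration for a [Cache].
+/// `near` and `far` name other DirectoryService instances in the same
+/// composition, resolved at build time. A hedged JSON sketch (the `type` tag
+/// is whatever this config is registered under, and the instance names are
+/// placeholders):
+///
+/// ```json
+/// { "type": "cache", "near": "some-local-ds", "far": "some-remote-ds" }
+/// ```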
+#[derive(serde::Deserialize, Debug)]
+#[serde(deny_unknown_fields)]
+pub struct CacheConfig {
+    near: String,
+    far: String,
+}
+
+impl TryFrom<url::Url> for CacheConfig {
+    type Error = Box<dyn std::error::Error + Send + Sync>;
+    fn try_from(_url: url::Url) -> Result<Self, Self::Error> {
+        Err(Error::StorageError(
+            "Instantiating a CombinedDirectoryService from a url is not supported".into(),
+        )
+        .into())
+    }
+}
+
+#[async_trait]
+impl ServiceBuilder for CacheConfig {
+    type Output = dyn DirectoryService;
+    async fn build<'a>(
+        &'a self,
+        _instance_name: &str,
+        context: &CompositionContext,
+    ) -> Result<Arc<dyn DirectoryService>, Box<dyn std::error::Error + Send + Sync + 'static>> {
+        let (near, far) = futures::join!(
+            context.resolve(self.near.clone()),
+            context.resolve(self.far.clone())
+        );
+        Ok(Arc::new(Cache {
+            near: near?,
+            far: far?,
+        }))
+    }
+}
diff --git a/tvix/castore/src/directoryservice/directory_graph.rs b/tvix/castore/src/directoryservice/directory_graph.rs
new file mode 100644
index 000000000000..017cef024059
--- /dev/null
+++ b/tvix/castore/src/directoryservice/directory_graph.rs
@@ -0,0 +1,414 @@
+use std::collections::HashMap;
+
+use petgraph::{
+    graph::{DiGraph, NodeIndex},
+    visit::{Bfs, DfsPostOrder, EdgeRef, IntoNodeIdentifiers, Walker},
+    Direction, Incoming,
+};
+use tracing::instrument;
+
+use super::order_validator::{LeavesToRootValidator, OrderValidator, RootToLeavesValidator};
+use crate::{path::PathComponent, B3Digest, Directory, Node};
+
+#[derive(thiserror::Error, Debug)]
+pub enum Error {
+    #[error("{0}")]
+    ValidationError(String),
+}
+
+struct EdgeWeight {
+    name: PathComponent,
+    size: u64,
+}
+
+/// This can be used to validate and/or re-order a Directory closure (DAG of
+/// connected Directories), and their insertion order.
+///
+/// The DirectoryGraph is parametrized on the insertion order, and can be
+/// constructed using the Default trait, or using `with_order` if the
+/// OrderValidator needs to be customized.
+///
+/// If the user is receiving directories from canonical protobuf encoding in
+/// root-to-leaves order, and parsing them, she can call `digest_allowed`
+/// _before_ parsing the protobuf record and then add it with `add_unchecked`.
+/// All other users insert the directories via `add`, in their specified order.
+/// During insertion, we validate as much as we can at that time:
+///
+///  - individual validation of Directory messages
+///  - validation of insertion order
+///  - validation of size fields of referred Directories
+///
+/// Internally it keeps all received Directories in a directed graph,
+/// with node weights being the Directories and edges pointing to child/parent
+/// directories.
+///
+/// Once all Directories have been inserted, a validate function can be
+/// called to check graph connectivity and ensure there are no disconnected
+/// components or missing nodes.
+/// Finally, `drain_leaves_to_root` or `drain_root_to_leaves` can be
+/// _chained_ onto `validate` to get an iterator over the (deduplicated and)
+/// validated list of directories in either order.
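+///
+/// A minimal usage sketch, assuming directories arrive in leaves-to-root
+/// order (`LeavesToRootValidator` is the `Default` ordering; the directory
+/// values are hypothetical):
+///
+/// ```ignore
+/// let mut dg = DirectoryGraph::<LeavesToRootValidator>::default();
+/// dg.add(child_directory)?; // hypothetical Directory referenced by the root
+/// dg.add(root_directory)?;
+/// let dirs: Vec<Directory> = dg.validate()?.drain_leaves_to_root().collect();
+/// ```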
+#[derive(Default)]
+pub struct DirectoryGraph<O> {
+    // A directed graph, using Directory as node weight.
+    // Edges point from parents to children.
+    //
+    // Nodes with None weights might exist when a digest has been referred to but the directory
+    // with this digest has not yet been sent.
+    //
+    // The option in the edge weight tracks the pending validation state of the respective edge, for example if
+    // the child has not been added yet.
+    graph: DiGraph<Option<Directory>, Option<EdgeWeight>>,
+
+    // A lookup table from directory digest to node index.
+    digest_to_node_ix: HashMap<B3Digest, NodeIndex>,
+
+    order_validator: O,
+}
+
+pub struct ValidatedDirectoryGraph {
+    graph: DiGraph<Option<Directory>, Option<EdgeWeight>>,
+
+    root: Option<NodeIndex>,
+}
+
+fn check_edge(edge: &EdgeWeight, child: &Directory) -> Result<(), Error> {
+    // Ensure the size specified in the child node matches our records.
+    if edge.size != child.size() {
+        return Err(Error::ValidationError(format!(
+            "'{}' has wrong size, specified {}, recorded {}",
+            edge.name,
+            edge.size,
+            child.size(),
+        )));
+    }
+    Ok(())
+}
+
+impl DirectoryGraph<LeavesToRootValidator> {
+    /// Insert a new Directory into the closure
+    #[instrument(level = "trace", skip_all, fields(directory.digest=%directory.digest(), directory.size=%directory.size()), err)]
+    pub fn add(&mut self, directory: Directory) -> Result<(), Error> {
+        if !self.order_validator.add_directory(&directory) {
+            return Err(Error::ValidationError(
+                "unknown directory was referenced".into(),
+            ));
+        }
+        self.add_order_unchecked(directory)
+    }
+}
+
+impl DirectoryGraph<RootToLeavesValidator> {
+    /// If the user is parsing directories from canonical protobuf encoding, she can
+    /// call `digest_allowed` _before_ parsing the protobuf record and then add it
+    /// with `add_order_unchecked`.
+    pub fn digest_allowed(&self, digest: B3Digest) -> bool {
+        self.order_validator.digest_allowed(&digest)
+    }
+
+    /// Insert a new Directory into the closure
+    #[instrument(level = "trace", skip_all, fields(directory.digest=%directory.digest(), directory.size=%directory.size()), err)]
+    pub fn add(&mut self, directory: Directory) -> Result<(), Error> {
+        let digest = directory.digest();
+        if !self.order_validator.digest_allowed(&digest) {
+            return Err(Error::ValidationError("unexpected digest".into()));
+        }
+        self.order_validator.add_directory_unchecked(&directory);
+        self.add_order_unchecked(directory)
+    }
+}
+
+impl<O: OrderValidator> DirectoryGraph<O> {
+    /// Customize the ordering, e.g. for pre-setting the root of the RootToLeavesValidator
+    pub fn with_order(order_validator: O) -> Self {
+        Self {
+            graph: Default::default(),
+            digest_to_node_ix: Default::default(),
+            order_validator,
+        }
+    }
+
+    /// Adds a directory which has already been confirmed to be in-order to the graph
+    pub fn add_order_unchecked(&mut self, directory: Directory) -> Result<(), Error> {
+        let digest = directory.digest();
+
+        // Teach the graph about the existence of a node with this digest
+        let ix = *self
+            .digest_to_node_ix
+            .entry(digest)
+            .or_insert_with(|| self.graph.add_node(None));
+
+        if self.graph[ix].is_some() {
+            // The node is already in the graph, there is nothing to do here.
+            return Ok(());
+        }
+
+        // set up edges to all child directories
+        for (name, node) in directory.nodes() {
+            if let Node::Directory { digest, size } = node {
+                let child_ix = *self
+                    .digest_to_node_ix
+                    .entry(digest.clone())
+                    .or_insert_with(|| self.graph.add_node(None));
+
+                let pending_edge_check = match &self.graph[child_ix] {
+                    Some(child) => {
+                        // child is already available, validate the edge now
+                        check_edge(
+                            &EdgeWeight {
+                                name: name.clone(),
+                                size: *size,
+                            },
+                            child,
+                        )?;
+                        None
+                    }
+                    None => Some(EdgeWeight {
+                        name: name.clone(),
+                        size: *size,
+                    }), // pending validation
+                };
+                self.graph.add_edge(ix, child_ix, pending_edge_check);
+            }
+        }
+
+        // validate the edges from parents to this node
+        // this collects edge ids in a Vec because there is no edges_directed_mut :'c
+        for edge_id in self
+            .graph
+            .edges_directed(ix, Direction::Incoming)
+            .map(|edge_ref| edge_ref.id())
+            .collect::<Vec<_>>()
+            .into_iter()
+        {
+            let edge_weight = self
+                .graph
+                .edge_weight_mut(edge_id)
+                .expect("edge not found")
+                .take()
+                .expect("edge is already validated");
+
+            check_edge(&edge_weight, &directory)?;
+        }
+
+        // finally, store the directory information in the node weight
+        self.graph[ix] = Some(directory);
+
+        Ok(())
+    }
+
+    #[instrument(level = "trace", skip_all, err)]
+    pub fn validate(self) -> Result<ValidatedDirectoryGraph, Error> {
+        // find all initial nodes (nodes without incoming edges)
+        let mut roots = self
+            .graph
+            .node_identifiers()
+            .filter(|&a| self.graph.neighbors_directed(a, Incoming).next().is_none());
+
+        let root = roots.next();
+        if roots.next().is_some() {
+            return Err(Error::ValidationError(
+                "graph has disconnected roots".into(),
+            ));
+        }
+
+        // test that the graph is complete
+        if self.graph.raw_nodes().iter().any(|n| n.weight.is_none()) {
+            return Err(Error::ValidationError("graph is incomplete".into()));
+        }
+
+        Ok(ValidatedDirectoryGraph {
+            graph: self.graph,
+            root,
+        })
+    }
+}
+
+impl ValidatedDirectoryGraph {
+    /// Returns the directories in from-root-to-leaves order.
+    /// In case no elements have been inserted, returns an empty iterator.
+    ///
+    /// panics if the specified root is not in the graph
+    #[instrument(level = "trace", skip_all)]
+    pub fn drain_root_to_leaves(self) -> impl Iterator<Item = Directory> {
+        let order = match self.root {
+            Some(root) => {
+                // do a BFS traversal of the graph, starting with the root node
+                Bfs::new(&self.graph, root)
+                    .iter(&self.graph)
+                    .collect::<Vec<_>>()
+            }
+            None => vec![], // No nodes have been inserted, do not traverse
+        };
+
+        let (mut nodes, _edges) = self.graph.into_nodes_edges();
+
+        order
+            .into_iter()
+            .filter_map(move |i| nodes[i.index()].weight.take())
+    }
+
+    /// Returns the directories in from-leaves-to-root order.
+    /// In case no elements have been inserted, returns an empty iterator.
+    ///
+    /// panics when the specified root is not in the graph
+    #[instrument(level = "trace", skip_all)]
+    pub fn drain_leaves_to_root(self) -> impl Iterator<Item = Directory> {
+        let order = match self.root {
+            Some(root) => {
+                // do a DFS Post-Order traversal of the graph, starting with the root node
+                DfsPostOrder::new(&self.graph, root)
+                    .iter(&self.graph)
+                    .collect::<Vec<_>>()
+            }
+            None => vec![], // No nodes have been inserted, do not traverse
+        };
+
+        let (mut nodes, _edges) = self.graph.into_nodes_edges();
+
+        order
+            .into_iter()
+            .filter_map(move |i| nodes[i.index()].weight.take())
+    }
+}
+#[cfg(test)]
+mod tests {
+    use crate::fixtures::{DIRECTORY_A, DIRECTORY_B, DIRECTORY_C};
+    use crate::{Directory, Node};
+    use lazy_static::lazy_static;
+    use rstest::rstest;
+
+    use super::{DirectoryGraph, LeavesToRootValidator, RootToLeavesValidator};
+
+    lazy_static! {
+        pub static ref BROKEN_PARENT_DIRECTORY: Directory =
+            Directory::try_from_iter([
+                (
+                    "foo".try_into().unwrap(),
+                    Node::Directory{
+                        digest: DIRECTORY_A.digest(),
+                        size: DIRECTORY_A.size() + 42, // wrong!
+                    }
+                )
+            ]).unwrap();
+    }
+
+    #[rstest]
+    /// Uploading an empty directory should succeed.
+    #[case::empty_directory(&[&*DIRECTORY_A], false, Some(vec![&*DIRECTORY_A]))]
+    /// Uploading A, then B (referring to A) should succeed.
+    #[case::simple_closure(&[&*DIRECTORY_A, &*DIRECTORY_B], false, Some(vec![&*DIRECTORY_A, &*DIRECTORY_B]))]
+    /// Uploading A, then A, then C (referring to A twice) should succeed.
+    /// We pretend to be a dumb client not deduping directories.
+    #[case::same_child(&[&*DIRECTORY_A, &*DIRECTORY_A, &*DIRECTORY_C], false, Some(vec![&*DIRECTORY_A, &*DIRECTORY_C]))]
+    /// Uploading A, then C (referring to A twice) should succeed.
+    #[case::same_child_dedup(&[&*DIRECTORY_A, &*DIRECTORY_C], false, Some(vec![&*DIRECTORY_A, &*DIRECTORY_C]))]
+    /// Uploading A, then C (referring to A twice), then B (itself referring to A) should fail during validate(),
+    /// as B itself would be left unconnected.
+    #[case::unconnected_node(&[&*DIRECTORY_A, &*DIRECTORY_C, &*DIRECTORY_B], false, None)]
+    /// Uploading B (referring to A) should fail immediately, because A was never uploaded.
+    #[case::dangling_pointer(&[&*DIRECTORY_B], true, None)]
+    /// Uploading a directory which refers to another Directory with a wrong size should fail.
+    #[case::wrong_size_in_parent(&[&*DIRECTORY_A, &*BROKEN_PARENT_DIRECTORY], true, None)]
+    fn test_uploads(
+        #[case] directories_to_upload: &[&Directory],
+        #[case] exp_fail_upload_last: bool,
+        #[case] exp_finalize: Option<Vec<&Directory>>, // Some(_) if finalize successful, None if not.
+    ) {
+        let mut dcv = DirectoryGraph::<LeavesToRootValidator>::default();
+        let len_directories_to_upload = directories_to_upload.len();
+
+        for (i, d) in directories_to_upload.iter().enumerate() {
+            let resp = dcv.add((*d).clone());
+            if i == len_directories_to_upload - 1 && exp_fail_upload_last {
+                assert!(resp.is_err(), "expect last put to fail");
+
+                // We don't really care anymore what finalize() would return, as
+                // the add() failed.
+                return;
+            } else {
+                assert!(resp.is_ok(), "expect put to succeed");
+            }
+        }
+
+        // everything was uploaded successfully. Test finalize().
+        let resp = dcv
+            .validate()
+            .map(|validated| validated.drain_leaves_to_root().collect::<Vec<_>>());
+
+        match exp_finalize {
+            Some(directories) => {
+                assert_eq!(
+                    Vec::from_iter(directories.iter().map(|e| (*e).to_owned())),
+                    resp.expect("drain should succeed")
+                );
+            }
+            None => {
+                resp.expect_err("drain should fail");
+            }
+        }
+    }
+
+    #[rstest]
+    /// Downloading an empty directory should succeed.
+    #[case::empty_directory(&*DIRECTORY_A, &[&*DIRECTORY_A], false, Some(vec![&*DIRECTORY_A]))]
+    /// Downloading B, then A (referenced by B) should succeed.
+    #[case::simple_closure(&*DIRECTORY_B, &[&*DIRECTORY_B, &*DIRECTORY_A], false, Some(vec![&*DIRECTORY_A, &*DIRECTORY_B]))]
+    /// Downloading C (referring to A twice), then A should succeed.
+    #[case::same_child_dedup(&*DIRECTORY_C, &[&*DIRECTORY_C, &*DIRECTORY_A], false, Some(vec![&*DIRECTORY_A, &*DIRECTORY_C]))]
+    /// Downloading C, then B (both referring to A but not referring to each other) should fail immediately, as B has no connection to C (the root).
+    #[case::unconnected_node(&*DIRECTORY_C, &[&*DIRECTORY_C, &*DIRECTORY_B], true, None)]
+    /// Downloading B (specified as the root) but receiving A instead should fail immediately, because A has no connection to B (the root).
+    #[case::dangling_pointer(&*DIRECTORY_B, &[&*DIRECTORY_A], true, None)]
+    /// Downloading a directory which refers to another Directory with a wrong size should fail.
+    #[case::wrong_size_in_parent(&*BROKEN_PARENT_DIRECTORY, &[&*BROKEN_PARENT_DIRECTORY, &*DIRECTORY_A], true, None)]
+    fn test_downloads(
+        #[case] root: &Directory,
+        #[case] directories_to_upload: &[&Directory],
+        #[case] exp_fail_upload_last: bool,
+        #[case] exp_finalize: Option<Vec<&Directory>>, // Some(_) if finalize successful, None if not.
+    ) {
+        let mut dcv =
+            DirectoryGraph::with_order(RootToLeavesValidator::new_with_root_digest(root.digest()));
+        let len_directories_to_upload = directories_to_upload.len();
+
+        for (i, d) in directories_to_upload.iter().enumerate() {
+            let resp = dcv.add((*d).clone());
+            if i == len_directories_to_upload - 1 && exp_fail_upload_last {
+                assert!(resp.is_err(), "expect last put to fail");
+
+                // We don't really care anymore what finalize() would return, as
+                // the add() failed.
+                return;
+            } else {
+                assert!(resp.is_ok(), "expect put to succeed");
+            }
+        }
+
+        // everything was uploaded successfully. Test finalize().
+        let resp = dcv
+            .validate()
+            .map(|validated| validated.drain_leaves_to_root().collect::<Vec<_>>());
+
+        match exp_finalize {
+            Some(directories) => {
+                assert_eq!(
+                    Vec::from_iter(directories.iter().map(|e| (*e).to_owned())),
+                    resp.expect("drain should succeed")
+                );
+            }
+            None => {
+                resp.expect_err("drain should fail");
+            }
+        }
+    }
+}
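
In application code, the upload flow exercised by these tests boils down to a few calls. A minimal sketch, assuming the DIRECTORY_A/DIRECTORY_B fixtures (B references A) and an error-propagating context:

    use tvix_castore::directoryservice::{DirectoryGraph, LeavesToRootValidator};

    // Leaves must be inserted before any directory referencing them.
    let mut graph = DirectoryGraph::<LeavesToRootValidator>::default();
    graph.add(DIRECTORY_A.clone())?; // leaf
    graph.add(DIRECTORY_B.clone())?; // references A, so it has to come after A

    // validate() checks that the graph is complete and has a single root;
    // drain_root_to_leaves() then yields the directories root-first, the
    // order in which stores bundling whole closures want to persist them.
    let directories: Vec<_> = graph.validate()?.drain_root_to_leaves().collect();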
diff --git a/tvix/castore/src/directoryservice/from_addr.rs b/tvix/castore/src/directoryservice/from_addr.rs
index ee675ca68a9f..3feb8f3509fe 100644
--- a/tvix/castore/src/directoryservice/from_addr.rs
+++ b/tvix/castore/src/directoryservice/from_addr.rs
@@ -1,12 +1,13 @@
-use url::Url;
+use std::sync::Arc;
 
-use crate::{proto::directory_service_client::DirectoryServiceClient, Error};
+use url::Url;
 
-use super::{
-    DirectoryService, GRPCDirectoryService, MemoryDirectoryService, ObjectStoreDirectoryService,
-    SledDirectoryService,
+use crate::composition::{
+    with_registry, CompositionContext, DeserializeWithRegistry, ServiceBuilder, REG,
 };
 
+use super::DirectoryService;
+
 /// Constructs a new instance of a [DirectoryService] from an URI.
 ///
 /// The following URIs are supported:
@@ -17,98 +18,32 @@ use super::{
 /// - `sled:///absolute/path/to/somewhere`
 ///   Uses sled, using a path on the disk for persistence. Can only be
 ///   opened by one process at a time.
+/// - `redb:`
+///   Uses an in-memory redb implementation.
+/// - `redb:///absolute/path/to/somewhere`
+///   Uses redb, using a path on the disk for persistence. Can only be
+///   opened by one process at a time.
 /// - `grpc+unix:///absolute/path/to/somewhere`
 ///   Connects to a local tvix-store gRPC service via Unix socket.
 /// - `grpc+http://host:port`, `grpc+https://host:port`
 ///    Connects to a (remote) tvix-store gRPC service.
-pub async fn from_addr(uri: &str) -> Result<Box<dyn DirectoryService>, crate::Error> {
+pub async fn from_addr(
+    uri: &str,
+) -> Result<Arc<dyn DirectoryService>, Box<dyn std::error::Error + Send + Sync>> {
     #[allow(unused_mut)]
     let mut url = Url::parse(uri)
         .map_err(|e| crate::Error::StorageError(format!("unable to parse url: {}", e)))?;
 
-    let directory_service: Box<dyn DirectoryService> = match url.scheme() {
-        "memory" => {
-            // memory doesn't support host or path in the URL.
-            if url.has_host() || !url.path().is_empty() {
-                return Err(Error::StorageError("invalid url".to_string()));
-            }
-            Box::<MemoryDirectoryService>::default()
-        }
-        "sled" => {
-            // sled doesn't support host, and a path can be provided (otherwise
-            // it'll live in memory only).
-            if url.has_host() {
-                return Err(Error::StorageError("no host allowed".to_string()));
-            }
-
-            if url.path() == "/" {
-                return Err(Error::StorageError(
-                    "cowardly refusing to open / with sled".to_string(),
-                ));
-            }
-
-            // TODO: expose compression and other parameters as URL parameters?
-
-            Box::new(if url.path().is_empty() {
-                SledDirectoryService::new_temporary()
-                    .map_err(|e| Error::StorageError(e.to_string()))?
-            } else {
-                SledDirectoryService::new(url.path())
-                    .map_err(|e| Error::StorageError(e.to_string()))?
-            })
-        }
-        scheme if scheme.starts_with("grpc+") => {
-            // schemes starting with grpc+ go to the GRPCPathInfoService.
-            //   That's normally grpc+unix for unix sockets, and grpc+http(s) for the HTTP counterparts.
-            // - In the case of unix sockets, there must be a path, but may not be a host.
-            // - In the case of non-unix sockets, there must be a host, but no path.
-            // Constructing the channel is handled by tvix_castore::channel::from_url.
-            let client = DirectoryServiceClient::new(crate::tonic::channel_from_url(&url).await?);
-            Box::new(GRPCDirectoryService::from_client(client))
-        }
-        scheme if scheme.starts_with("objectstore+") => {
-            // We need to convert the URL to string, strip the prefix there, and then
-            // parse it back as url, as Url::set_scheme() rejects some of the transitions we want to do.
-            let trimmed_url = {
-                let s = url.to_string();
-                Url::parse(s.strip_prefix("objectstore+").unwrap()).unwrap()
-            };
-            Box::new(
-                ObjectStoreDirectoryService::parse_url(&trimmed_url)
-                    .map_err(|e| Error::StorageError(e.to_string()))?,
-            )
-        }
-        #[cfg(feature = "cloud")]
-        "bigtable" => {
-            use super::bigtable::BigtableParameters;
-            use super::BigtableDirectoryService;
-
-            // parse the instance name from the hostname.
-            let instance_name = url
-                .host_str()
-                .ok_or_else(|| Error::StorageError("instance name missing".into()))?
-                .to_string();
-
-            // โ€ฆ but add it to the query string now, so we just need to parse that.
-            url.query_pairs_mut()
-                .append_pair("instance_name", &instance_name);
-
-            let params: BigtableParameters = serde_qs::from_str(url.query().unwrap_or_default())
-                .map_err(|e| Error::InvalidRequest(format!("failed to parse parameters: {}", e)))?;
+    let directory_service_config = with_registry(&REG, || {
+        <DeserializeWithRegistry<Box<dyn ServiceBuilder<Output = dyn DirectoryService>>>>::try_from(
+            url,
+        )
+    })?
+    .0;
+    let directory_service = directory_service_config
+        .build("anonymous", &CompositionContext::blank())
+        .await?;
 
-            Box::new(
-                BigtableDirectoryService::connect(params)
-                    .await
-                    .map_err(|e| Error::StorageError(e.to_string()))?,
-            )
-        }
-        _ => {
-            return Err(crate::Error::StorageError(format!(
-                "unknown scheme: {}",
-                url.scheme()
-            )))
-        }
-    };
     Ok(directory_service)
 }
 
@@ -122,6 +57,8 @@ mod tests {
     lazy_static! {
         static ref TMPDIR_SLED_1: TempDir = TempDir::new().unwrap();
         static ref TMPDIR_SLED_2: TempDir = TempDir::new().unwrap();
+        static ref TMPDIR_REDB_1: TempDir = TempDir::new().unwrap();
+        static ref TMPDIR_REDB_2: TempDir = TempDir::new().unwrap();
     }
 
     #[rstest]
@@ -145,6 +82,16 @@ mod tests {
     #[case::memory_invalid_root_path("memory:///", false)]
     /// This sets a memory url path to "/foo", which is invalid.
     #[case::memory_invalid_root_path_foo("memory:///foo", false)]
+    /// This configures redb in temporary mode.
+    #[case::redb_valid_temporary("redb://", true)]
+    /// This configures redb with /, which should fail.
+    #[case::redb_invalid_root("redb:///", false)]
+    /// This configures redb with a host, not path, which should fail.
+    #[case::redb_invalid_host("redb://foo.example", false)]
+    /// This configures redb with a valid path, which should succeed.
+    #[case::redb_valid_path(&format!("redb://{}", &TMPDIR_REDB_1.path().join("foo").to_str().unwrap()), true)]
+    /// This configures redb with a host and a valid path, which should fail.
+    #[case::redb_invalid_host_with_valid_path(&format!("redb://foo.example{}", &TMPDIR_REDB_2.path().join("bar").to_str().unwrap()), false)]
     /// Correct scheme to connect to a unix socket.
     #[case::grpc_valid_unix_socket("grpc+unix:///path/to/somewhere", true)]
     /// Correct scheme for unix socket, but setting a host too, which is invalid.
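
A hedged sketch of the new call site, using one of the schemes documented above (the concrete error type is elided behind `?`):

    use tvix_castore::directoryservice::from_addr;

    // from_addr now builds the service through the composition registry and
    // returns an Arc<dyn DirectoryService>; unknown schemes fail when the URL
    // cannot be deserialized into any registered config.
    let directory_service = from_addr("memory://").await?;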
diff --git a/tvix/castore/src/directoryservice/grpc.rs b/tvix/castore/src/directoryservice/grpc.rs
index fe935629bfcb..9696c5631949 100644
--- a/tvix/castore/src/directoryservice/grpc.rs
+++ b/tvix/castore/src/directoryservice/grpc.rs
@@ -1,44 +1,47 @@
 use std::collections::HashSet;
 
-use super::{DirectoryPutter, DirectoryService};
+use super::{Directory, DirectoryPutter, DirectoryService};
+use crate::composition::{CompositionContext, ServiceBuilder};
 use crate::proto::{self, get_directory_request::ByWhat};
-use crate::{B3Digest, Error};
+use crate::{B3Digest, DirectoryError, Error};
 use async_stream::try_stream;
 use futures::stream::BoxStream;
+use std::sync::Arc;
 use tokio::spawn;
 use tokio::sync::mpsc::UnboundedSender;
 use tokio::task::JoinHandle;
 use tokio_stream::wrappers::UnboundedReceiverStream;
-use tonic::async_trait;
-use tonic::Code;
-use tonic::{transport::Channel, Status};
-use tracing::{instrument, warn};
+use tonic::{async_trait, Code, Status};
+use tracing::{instrument, warn, Instrument as _};
 
 /// Connects to a (remote) tvix-store DirectoryService over gRPC.
 #[derive(Clone)]
-pub struct GRPCDirectoryService {
+pub struct GRPCDirectoryService<T> {
     /// The internal reference to a gRPC client.
     /// Cloning it is cheap, and it internally handles concurrent requests.
-    grpc_client: proto::directory_service_client::DirectoryServiceClient<Channel>,
+    grpc_client: proto::directory_service_client::DirectoryServiceClient<T>,
 }
 
-impl GRPCDirectoryService {
+impl<T> GRPCDirectoryService<T> {
     /// construct a [GRPCDirectoryService] from a [proto::directory_service_client::DirectoryServiceClient].
     /// panics if called outside the context of a tokio runtime.
     pub fn from_client(
-        grpc_client: proto::directory_service_client::DirectoryServiceClient<Channel>,
+        grpc_client: proto::directory_service_client::DirectoryServiceClient<T>,
     ) -> Self {
         Self { grpc_client }
     }
 }
 
 #[async_trait]
-impl DirectoryService for GRPCDirectoryService {
+impl<T> DirectoryService for GRPCDirectoryService<T>
+where
+    T: tonic::client::GrpcService<tonic::body::BoxBody> + Send + Sync + Clone + 'static,
+    T::ResponseBody: tonic::codegen::Body<Data = tonic::codegen::Bytes> + Send + 'static,
+    <T::ResponseBody as tonic::codegen::Body>::Error: Into<tonic::codegen::StdError> + Send,
+    T::Future: Send,
+{
     #[instrument(level = "trace", skip_all, fields(directory.digest = %digest))]
-    async fn get(
-        &self,
-        digest: &B3Digest,
-    ) -> Result<Option<crate::proto::Directory>, crate::Error> {
+    async fn get(&self, digest: &B3Digest) -> Result<Option<Directory>, crate::Error> {
         // Get a new handle to the gRPC client, and copy the digest.
         let mut grpc_client = self.grpc_client.clone();
         let digest_cpy = digest.clone();
@@ -66,15 +69,10 @@ impl DirectoryService for GRPCDirectoryService {
                         "requested directory with digest {}, but got {}",
                         digest, actual_digest
                     )))
-                } else if let Err(e) = directory.validate() {
-                    // Validate the Directory itself is valid.
-                    warn!("directory failed validation: {}", e.to_string());
-                    Err(crate::Error::StorageError(format!(
-                        "directory {} failed validation: {}",
-                        digest, e,
-                    )))
                 } else {
-                    Ok(Some(directory))
+                    Ok(Some(directory.try_into().map_err(|_| {
+                        Error::StorageError("invalid root digest length in response".to_string())
+                    })?))
                 }
             }
             Ok(None) => Ok(None),
@@ -84,11 +82,11 @@ impl DirectoryService for GRPCDirectoryService {
     }
 
     #[instrument(level = "trace", skip_all, fields(directory.digest = %directory.digest()))]
-    async fn put(&self, directory: crate::proto::Directory) -> Result<B3Digest, crate::Error> {
+    async fn put(&self, directory: Directory) -> Result<B3Digest, crate::Error> {
         let resp = self
             .grpc_client
             .clone()
-            .put(tokio_stream::once(directory))
+            .put(tokio_stream::once(proto::Directory::from(directory)))
             .await;
 
         match resp {
@@ -107,7 +105,7 @@ impl DirectoryService for GRPCDirectoryService {
     fn get_recursive(
         &self,
         root_directory_digest: &B3Digest,
-    ) -> BoxStream<'static, Result<proto::Directory, Error>> {
+    ) -> BoxStream<'static, Result<Directory, Error>> {
         let mut grpc_client = self.grpc_client.clone();
         let root_directory_digest = root_directory_digest.clone();
 
@@ -124,19 +122,11 @@ impl DirectoryService for GRPCDirectoryService {
             // The Directory digests we received so far
             let mut received_directory_digests: HashSet<B3Digest> = HashSet::new();
             // The Directory digests we're still expecting to get sent.
-            let mut expected_directory_digests: HashSet<B3Digest> = HashSet::from([root_directory_digest]);
+            let mut expected_directory_digests: HashSet<B3Digest> = HashSet::from([root_directory_digest.clone()]);
 
             loop {
                 match stream.message().await {
                     Ok(Some(directory)) => {
-                        // validate the directory itself.
-                        if let Err(e) = directory.validate() {
-                            Err(crate::Error::StorageError(format!(
-                                "directory {} failed validation: {}",
-                                directory.digest(),
-                                e,
-                            )))?;
-                        }
                         // validate we actually expected that directory, and move it from expected to received.
                         let directory_digest = directory.digest();
                         let was_expected = expected_directory_digests.remove(&directory_digest);
@@ -162,14 +152,28 @@ impl DirectoryService for GRPCDirectoryService {
                                 .insert(child_directory_digest);
                         }
 
+                        let directory = directory.try_into()
+                            .map_err(|e: DirectoryError| Error::StorageError(e.to_string()))?;
+
                         yield directory;
                     },
+                    Ok(None) if expected_directory_digests.len() == 1 && expected_directory_digests.contains(&root_directory_digest) => {
+                        // The root directory of the requested closure was not found, return an
+                        // empty stream
+                        return
+                    }
                     Ok(None) => {
-                        // If we were still expecting something, that's an error.
-                        if !expected_directory_digests.is_empty() {
+                        // The stream has ended
+                        let diff_len = expected_directory_digests
+                            // Account for directories which have been referenced more than once,
+                            // but only received once since they were deduplicated
+                            .difference(&received_directory_digests)
+                            .count();
+                        // If this is not empty, then the closure is incomplete
+                        if diff_len != 0 {
                             Err(crate::Error::StorageError(format!(
                                 "still expected {} directories, but got premature end of stream",
-                                expected_directory_digests.len(),
+                                diff_len
                             )))?
                         } else {
                             return
@@ -194,14 +198,17 @@ impl DirectoryService for GRPCDirectoryService {
 
         let (tx, rx) = tokio::sync::mpsc::unbounded_channel();
 
-        let task: JoinHandle<Result<proto::PutDirectoryResponse, Status>> = spawn(async move {
-            let s = grpc_client
-                .put(UnboundedReceiverStream::new(rx))
-                .await?
-                .into_inner();
+        let task: JoinHandle<Result<proto::PutDirectoryResponse, Status>> = spawn(
+            async move {
+                let s = grpc_client
+                    .put(UnboundedReceiverStream::new(rx))
+                    .await?
+                    .into_inner();
 
-            Ok(s)
-        });
+                Ok(s)
+            } // instrument the task with the current span, this is not done by default
+            .in_current_span(),
+        );
 
         Box::new(GRPCPutter {
             rq: Some((task, tx)),
@@ -209,6 +216,40 @@ impl DirectoryService for GRPCDirectoryService {
     }
 }
 
+#[derive(serde::Deserialize, Debug)]
+#[serde(deny_unknown_fields)]
+pub struct GRPCDirectoryServiceConfig {
+    url: String,
+}
+
+impl TryFrom<url::Url> for GRPCDirectoryServiceConfig {
+    type Error = Box<dyn std::error::Error + Send + Sync>;
+    fn try_from(url: url::Url) -> Result<Self, Self::Error> {
+        // This is normally grpc+unix for unix sockets, and grpc+http(s) for the HTTP counterparts.
+        // - In the case of unix sockets, there must be a path, but may not be a host.
+        // - In the case of non-unix sockets, there must be a host, but no path.
+        // Constructing the channel is handled by tvix_castore::tonic::channel_from_url.
+        Ok(GRPCDirectoryServiceConfig {
+            url: url.to_string(),
+        })
+    }
+}
+
+#[async_trait]
+impl ServiceBuilder for GRPCDirectoryServiceConfig {
+    type Output = dyn DirectoryService;
+    async fn build<'a>(
+        &'a self,
+        _instance_name: &str,
+        _context: &CompositionContext,
+    ) -> Result<Arc<dyn DirectoryService>, Box<dyn std::error::Error + Send + Sync + 'static>> {
+        let client = proto::directory_service_client::DirectoryServiceClient::new(
+            crate::tonic::channel_from_url(&self.url.parse()?).await?,
+        );
+        Ok(Arc::new(GRPCDirectoryService::from_client(client)))
+    }
+}
+
 /// Allows uploading multiple Directory messages in the same gRPC stream.
 pub struct GRPCPutter {
     /// Data about the current request - a handle to the task, and the tx part
@@ -225,11 +266,11 @@ pub struct GRPCPutter {
 #[async_trait]
 impl DirectoryPutter for GRPCPutter {
     #[instrument(level = "trace", skip_all, fields(directory.digest=%directory.digest()), err)]
-    async fn put(&mut self, directory: proto::Directory) -> Result<(), crate::Error> {
+    async fn put(&mut self, directory: Directory) -> Result<(), crate::Error> {
         match self.rq {
             // If we're not already closed, send the directory to directory_sender.
             Some((_, ref directory_sender)) => {
-                if directory_sender.send(directory).is_err() {
+                if directory_sender.send(directory.into()).is_err() {
                     // If the channel has been prematurely closed, invoke close (so we can peek at the error code)
                     // That error code is much more helpful, because it
                     // contains the error message from the server.
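
Since the client is now generic over the tonic transport, constructing it from a plain Channel keeps working. A sketch, assuming a reachable endpoint (the ServiceBuilder above does the equivalent via channel_from_url):

    use tvix_castore::directoryservice::GRPCDirectoryService;
    use tvix_castore::proto::directory_service_client::DirectoryServiceClient;

    // Build a channel by hand instead of going through a grpc+http URL.
    let channel = tonic::transport::Endpoint::from_static("http://[::1]:8000")
        .connect()
        .await?;
    let svc = GRPCDirectoryService::from_client(DirectoryServiceClient::new(channel));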
diff --git a/tvix/castore/src/directoryservice/memory.rs b/tvix/castore/src/directoryservice/memory.rs
index 3b2795c3968c..b039d9bc7d84 100644
--- a/tvix/castore/src/directoryservice/memory.rs
+++ b/tvix/castore/src/directoryservice/memory.rs
@@ -1,4 +1,4 @@
-use crate::{proto, B3Digest, Error};
+use crate::{B3Digest, Error};
 use futures::stream::BoxStream;
 use std::collections::HashMap;
 use std::sync::Arc;
@@ -7,7 +7,9 @@ use tonic::async_trait;
 use tracing::{instrument, warn};
 
 use super::utils::traverse_directory;
-use super::{DirectoryPutter, DirectoryService, SimplePutter};
+use super::{Directory, DirectoryPutter, DirectoryService, SimplePutter};
+use crate::composition::{CompositionContext, ServiceBuilder};
+use crate::proto;
 
 #[derive(Clone, Default)]
 pub struct MemoryDirectoryService {
@@ -17,7 +19,7 @@ pub struct MemoryDirectoryService {
 #[async_trait]
 impl DirectoryService for MemoryDirectoryService {
     #[instrument(skip(self, digest), fields(directory.digest = %digest))]
-    async fn get(&self, digest: &B3Digest) -> Result<Option<proto::Directory>, Error> {
+    async fn get(&self, digest: &B3Digest) -> Result<Option<Directory>, Error> {
         let db = self.db.read().await;
 
         match db.get(digest) {
@@ -36,35 +38,20 @@ impl DirectoryService for MemoryDirectoryService {
                     )));
                 }
 
-                // Validate the Directory itself is valid.
-                if let Err(e) = directory.validate() {
-                    warn!("directory failed validation: {}", e.to_string());
-                    return Err(Error::StorageError(format!(
-                        "directory {} failed validation: {}",
-                        actual_digest, e,
-                    )));
-                }
-
-                Ok(Some(directory.clone()))
+                Ok(Some(directory.clone().try_into().map_err(|e| {
+                    crate::Error::StorageError(format!("corrupted directory: {}", e))
+                })?))
             }
         }
     }
 
     #[instrument(skip(self, directory), fields(directory.digest = %directory.digest()))]
-    async fn put(&self, directory: proto::Directory) -> Result<B3Digest, Error> {
+    async fn put(&self, directory: Directory) -> Result<B3Digest, Error> {
         let digest = directory.digest();
 
-        // validate the directory itself.
-        if let Err(e) = directory.validate() {
-            return Err(Error::InvalidRequest(format!(
-                "directory {} failed validation: {}",
-                digest, e,
-            )));
-        }
-
         // store it
         let mut db = self.db.write().await;
-        db.insert(digest.clone(), directory);
+        db.insert(digest.clone(), directory.into());
 
         Ok(digest)
     }
@@ -73,7 +60,7 @@ impl DirectoryService for MemoryDirectoryService {
     fn get_recursive(
         &self,
         root_directory_digest: &B3Digest,
-    ) -> BoxStream<'static, Result<proto::Directory, Error>> {
+    ) -> BoxStream<'static, Result<Directory, Error>> {
         traverse_directory(self.clone(), root_directory_digest)
     }
 
@@ -85,3 +72,30 @@ impl DirectoryService for MemoryDirectoryService {
         Box::new(SimplePutter::new(self.clone()))
     }
 }
+
+#[derive(serde::Deserialize, Debug)]
+#[serde(deny_unknown_fields)]
+pub struct MemoryDirectoryServiceConfig {}
+
+impl TryFrom<url::Url> for MemoryDirectoryServiceConfig {
+    type Error = Box<dyn std::error::Error + Send + Sync>;
+    fn try_from(url: url::Url) -> Result<Self, Self::Error> {
+        // memory doesn't support host or path in the URL.
+        if url.has_host() || !url.path().is_empty() {
+            return Err(Error::StorageError("invalid url".to_string()).into());
+        }
+        Ok(MemoryDirectoryServiceConfig {})
+    }
+}
+
+#[async_trait]
+impl ServiceBuilder for MemoryDirectoryServiceConfig {
+    type Output = dyn DirectoryService;
+    async fn build<'a>(
+        &'a self,
+        _instance_name: &str,
+        _context: &CompositionContext,
+    ) -> Result<Arc<dyn DirectoryService>, Box<dyn std::error::Error + Send + Sync + 'static>> {
+        Ok(Arc::new(MemoryDirectoryService::default()))
+    }
+}
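
A round-trip through the memory backend with the strongly typed Directory now reads like the following sketch (Directory::new() is assumed here as the empty-directory constructor):

    use tvix_castore::directoryservice::{DirectoryService, MemoryDirectoryService};
    use tvix_castore::Directory;

    let svc = MemoryDirectoryService::default();

    // put() takes the validated Directory type; the proto conversion happens
    // inside the service, and get() converts back, rejecting corrupt records.
    let digest = svc.put(Directory::new()).await?;
    assert_eq!(Some(Directory::new()), svc.get(&digest).await?);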
diff --git a/tvix/castore/src/directoryservice/mod.rs b/tvix/castore/src/directoryservice/mod.rs
index 3f180ef162d8..25162e4de853 100644
--- a/tvix/castore/src/directoryservice/mod.rs
+++ b/tvix/castore/src/directoryservice/mod.rs
@@ -1,12 +1,16 @@
-use crate::{proto, B3Digest, Error};
+use crate::composition::{Registry, ServiceBuilder};
+use crate::{B3Digest, Directory, Error};
+
 use futures::stream::BoxStream;
 use tonic::async_trait;
-
-mod closure_validator;
+mod combinators;
+mod directory_graph;
 mod from_addr;
 mod grpc;
 mod memory;
 mod object_store;
+mod order_validator;
+mod redb;
 mod simple_putter;
 mod sled;
 #[cfg(test)]
@@ -14,13 +18,16 @@ pub mod tests;
 mod traverse;
 mod utils;
 
-pub use self::closure_validator::ClosureValidator;
+pub use self::combinators::{Cache, CacheConfig};
+pub use self::directory_graph::{DirectoryGraph, ValidatedDirectoryGraph};
 pub use self::from_addr::from_addr;
-pub use self::grpc::GRPCDirectoryService;
-pub use self::memory::MemoryDirectoryService;
-pub use self::object_store::ObjectStoreDirectoryService;
+pub use self::grpc::{GRPCDirectoryService, GRPCDirectoryServiceConfig};
+pub use self::memory::{MemoryDirectoryService, MemoryDirectoryServiceConfig};
+pub use self::object_store::{ObjectStoreDirectoryService, ObjectStoreDirectoryServiceConfig};
+pub use self::order_validator::{LeavesToRootValidator, OrderValidator, RootToLeavesValidator};
+pub use self::redb::{RedbDirectoryService, RedbDirectoryServiceConfig};
 pub use self::simple_putter::SimplePutter;
-pub use self::sled::SledDirectoryService;
+pub use self::sled::{SledDirectoryService, SledDirectoryServiceConfig};
 pub use self::traverse::descend_to;
 pub use self::utils::traverse_directory;
 
@@ -28,10 +35,10 @@ pub use self::utils::traverse_directory;
 mod bigtable;
 
 #[cfg(feature = "cloud")]
-pub use self::bigtable::BigtableDirectoryService;
+pub use self::bigtable::{BigtableDirectoryService, BigtableParameters};
 
 /// The base trait all Directory services need to implement.
-/// This is a simple get and put of [crate::proto::Directory], returning their
+/// This is a simple get and put of [Directory], returning their
 /// digest.
 #[async_trait]
 pub trait DirectoryService: Send + Sync {
@@ -43,14 +50,14 @@ pub trait DirectoryService: Send + Sync {
     /// Directory digests that are at the "root", aka the last element that's
     /// sent to a DirectoryPutter. This makes sense for implementations bundling
     /// closures of directories together in batches.
-    async fn get(&self, digest: &B3Digest) -> Result<Option<proto::Directory>, Error>;
+    async fn get(&self, digest: &B3Digest) -> Result<Option<Directory>, Error>;
     /// Uploads a single Directory message, and returns the calculated
     /// digest, or an error. An error *must* also be returned if the message is
     /// not valid.
-    async fn put(&self, directory: proto::Directory) -> Result<B3Digest, Error>;
+    async fn put(&self, directory: Directory) -> Result<B3Digest, Error>;
 
-    /// Looks up a closure of [proto::Directory].
-    /// Ideally this would be a `impl Stream<Item = Result<proto::Directory, Error>>`,
+    /// Looks up a closure of [Directory].
+    /// Ideally this would be a `impl Stream<Item = Result<Directory, Error>>`,
     /// and we'd be able to add a default implementation for it here, but
     /// we can't have that yet.
     ///
@@ -63,12 +70,14 @@ pub trait DirectoryService: Send + Sync {
     /// Directories are sent in an order from the root to the leaves, so that
     /// the receiving side can validate each message to be a connected to the root
     /// that has initially been requested.
+    ///
+    /// In case the directory cannot be found, this should return an empty stream.
     fn get_recursive(
         &self,
         root_directory_digest: &B3Digest,
-    ) -> BoxStream<'static, Result<proto::Directory, Error>>;
+    ) -> BoxStream<'static, Result<Directory, Error>>;
 
-    /// Allows persisting a closure of [proto::Directory], which is a graph of
+    /// Allows persisting a closure of [Directory], which is a graph of
     /// connected Directory messages.
     fn put_multiple_start(&self) -> Box<dyn DirectoryPutter>;
 }
@@ -78,18 +87,18 @@ impl<A> DirectoryService for A
 where
     A: AsRef<dyn DirectoryService> + Send + Sync,
 {
-    async fn get(&self, digest: &B3Digest) -> Result<Option<proto::Directory>, Error> {
+    async fn get(&self, digest: &B3Digest) -> Result<Option<Directory>, Error> {
         self.as_ref().get(digest).await
     }
 
-    async fn put(&self, directory: proto::Directory) -> Result<B3Digest, Error> {
+    async fn put(&self, directory: Directory) -> Result<B3Digest, Error> {
         self.as_ref().put(directory).await
     }
 
     fn get_recursive(
         &self,
         root_directory_digest: &B3Digest,
-    ) -> BoxStream<'static, Result<proto::Directory, Error>> {
+    ) -> BoxStream<'static, Result<Directory, Error>> {
         self.as_ref().get_recursive(root_directory_digest)
     }
 
@@ -98,7 +107,7 @@ where
     }
 }
 
-/// Provides a handle to put a closure of connected [proto::Directory] elements.
+/// Provides a handle to put a closure of connected [Directory] elements.
 ///
 /// The consumer can periodically call [DirectoryPutter::put], starting from the
 /// leaves. Once the root is reached, [DirectoryPutter::close] can be called to
@@ -110,15 +119,29 @@ where
 /// but a single file or symlink.
 #[async_trait]
 pub trait DirectoryPutter: Send {
-    /// Put a individual [proto::Directory] into the store.
+    /// Put an individual [Directory] into the store.
     /// Error semantics and behaviour is up to the specific implementation of
     /// this trait.
     /// Due to bursting, the returned error might refer to an object previously
     /// sent via `put`.
-    async fn put(&mut self, directory: proto::Directory) -> Result<(), Error>;
+    async fn put(&mut self, directory: Directory) -> Result<(), Error>;
 
     /// Close the stream, and wait for any errors.
     /// If there's been any invalid Directory message uploaded, an error *must*
     /// be returned.
     async fn close(&mut self) -> Result<B3Digest, Error>;
 }
+
+/// Registers the builtin DirectoryService implementations with the registry
+pub(crate) fn register_directory_services(reg: &mut Registry) {
+    reg.register::<Box<dyn ServiceBuilder<Output = dyn DirectoryService>>, super::directoryservice::ObjectStoreDirectoryServiceConfig>("objectstore");
+    reg.register::<Box<dyn ServiceBuilder<Output = dyn DirectoryService>>, super::directoryservice::MemoryDirectoryServiceConfig>("memory");
+    reg.register::<Box<dyn ServiceBuilder<Output = dyn DirectoryService>>, super::directoryservice::CacheConfig>("cache");
+    reg.register::<Box<dyn ServiceBuilder<Output = dyn DirectoryService>>, super::directoryservice::GRPCDirectoryServiceConfig>("grpc");
+    reg.register::<Box<dyn ServiceBuilder<Output = dyn DirectoryService>>, super::directoryservice::SledDirectoryServiceConfig>("sled");
+    reg.register::<Box<dyn ServiceBuilder<Output = dyn DirectoryService>>, super::directoryservice::RedbDirectoryServiceConfig>("redb");
+    #[cfg(feature = "cloud")]
+    {
+        reg.register::<Box<dyn ServiceBuilder<Output = dyn DirectoryService>>, super::directoryservice::BigtableParameters>("bigtable");
+    }
+}
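
The putter contract described above, as a short sketch; dir_svc stands for any DirectoryService, and the fixtures are the same as in the tests (B references A):

    use tvix_castore::directoryservice::DirectoryService;

    // Leaves first, root last; close() returns the root digest once the
    // store has accepted the whole closure.
    let mut putter = dir_svc.put_multiple_start();
    putter.put(DIRECTORY_A.clone()).await?;
    putter.put(DIRECTORY_B.clone()).await?;
    let root_digest = putter.close().await?;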
diff --git a/tvix/castore/src/directoryservice/object_store.rs b/tvix/castore/src/directoryservice/object_store.rs
index 64ce335edb86..5b5281abcd2f 100644
--- a/tvix/castore/src/directoryservice/object_store.rs
+++ b/tvix/castore/src/directoryservice/object_store.rs
@@ -1,4 +1,4 @@
-use std::collections::HashSet;
+use std::collections::HashMap;
 use std::sync::Arc;
 
 use data_encoding::HEXLOWER;
@@ -16,8 +16,12 @@ use tonic::async_trait;
 use tracing::{instrument, trace, warn, Level};
 use url::Url;
 
-use super::{ClosureValidator, DirectoryPutter, DirectoryService};
-use crate::{proto, B3Digest, Error};
+use super::{
+    Directory, DirectoryGraph, DirectoryPutter, DirectoryService, LeavesToRootValidator,
+    RootToLeavesValidator,
+};
+use crate::composition::{CompositionContext, ServiceBuilder};
+use crate::{proto, B3Digest, Error, Node};
 
 /// Stores directory closures in an object store.
 /// Notably, this makes use of the option to disallow accessing child directories except when
@@ -45,7 +49,7 @@ fn derive_dirs_path(base_path: &Path, digest: &B3Digest) -> Path {
 const MAX_FRAME_LENGTH: usize = 1 * 1024 * 1024 * 1000; // 1000 MiB
 impl ObjectStoreDirectoryService {
-    /// Constructs a new [ObjectStoreBlobService] from a [Url] supported by
+    /// Constructs a new [ObjectStoreDirectoryService] from a [Url] supported by
     /// [object_store].
     /// Any path suffix becomes the base path of the object store.
     /// additional options, the same as in [object_store::parse_url_opts] can
@@ -75,13 +79,17 @@ impl DirectoryService for ObjectStoreDirectoryService {
     /// This is the same steps as for get_recursive anyways, so we just call get_recursive and
     /// return the first element of the stream and drop the request.
     #[instrument(skip(self, digest), fields(directory.digest = %digest))]
-    async fn get(&self, digest: &B3Digest) -> Result<Option<proto::Directory>, Error> {
+    async fn get(&self, digest: &B3Digest) -> Result<Option<Directory>, Error> {
         self.get_recursive(digest).take(1).next().await.transpose()
     }
 
     #[instrument(skip(self, directory), fields(directory.digest = %directory.digest()))]
-    async fn put(&self, directory: proto::Directory) -> Result<B3Digest, Error> {
-        if !directory.directories.is_empty() {
+    async fn put(&self, directory: Directory) -> Result<B3Digest, Error> {
+        // Ensure the directory doesn't contain other directory children
+        if directory
+            .nodes()
+            .any(|(_, e)| matches!(e, Node::Directory { .. }))
+        {
             return Err(Error::InvalidRequest(
                     "only put_multiple_start is supported by the ObjectStoreDirectoryService for directories with children".into(),
             ));
@@ -96,10 +104,11 @@ impl DirectoryService for ObjectStoreDirectoryService {
     fn get_recursive(
         &self,
         root_directory_digest: &B3Digest,
-    ) -> BoxStream<'static, Result<proto::Directory, Error>> {
-        // The Directory digests we're expecting to receive.
-        let mut expected_directory_digests: HashSet<B3Digest> =
-            HashSet::from([root_directory_digest.clone()]);
+    ) -> BoxStream<'static, Result<Directory, Error>> {
+        // Check that we are not passing on bogus data from the object store to the client, and that the
+        // trust chain from the root digest to the leaves is intact
+        let mut order_validator =
+            RootToLeavesValidator::new_with_root_digest(root_directory_digest.clone());
 
         let dir_path = derive_dirs_path(&self.base_path, root_directory_digest);
         let object_store = self.object_store.clone();
@@ -130,8 +139,7 @@ impl DirectoryService for ObjectStoreDirectoryService {
                         let digest: B3Digest = hasher.update(&buf).finalize().as_bytes().into();
 
                         // Ensure to only decode the directory objects whose digests we trust
-                        let was_expected = expected_directory_digests.remove(&digest);
-                        if !was_expected {
+                        if !order_validator.digest_allowed(&digest) {
                             return Err(crate::Error::StorageError(format!(
                                 "received unexpected directory {}",
                                 digest
@@ -142,14 +150,13 @@ impl DirectoryService for ObjectStoreDirectoryService {
                             warn!("unable to parse directory {}: {}", digest, e);
                             Error::StorageError(e.to_string())
                         })?;
+                        let directory = Directory::try_from(directory).map_err(|e| {
+                            warn!("unable to convert directory {}: {}", digest, e);
+                            Error::StorageError(e.to_string())
+                        })?;
 
-                        for directory in &directory.directories {
-                            // Allow the children to appear next
-                            expected_directory_digests.insert(
-                                B3Digest::try_from(directory.digest.clone())
-                                    .map_err(|e| Error::StorageError(e.to_string()))?,
-                            );
-                        }
+                        // Allow the children to appear next
+                        order_validator.add_directory_unchecked(&directory);
 
                         Ok(directory)
                     })())
@@ -173,11 +180,64 @@ impl DirectoryService for ObjectStoreDirectoryService {
     }
 }
 
+#[derive(serde::Deserialize)]
+#[serde(deny_unknown_fields)]
+pub struct ObjectStoreDirectoryServiceConfig {
+    object_store_url: String,
+    #[serde(default)]
+    object_store_options: HashMap<String, String>,
+}
+
+impl TryFrom<url::Url> for ObjectStoreDirectoryServiceConfig {
+    type Error = Box<dyn std::error::Error + Send + Sync>;
+    fn try_from(url: url::Url) -> Result<Self, Self::Error> {
+        // We need to convert the URL to string, strip the prefix there, and then
+        // parse it back as url, as Url::set_scheme() rejects some of the transitions we want to do.
+        let trimmed_url = {
+            let s = url.to_string();
+            let mut url = Url::parse(
+                s.strip_prefix("objectstore+")
+                    .ok_or(Error::StorageError("Missing objectstore uri".into()))?,
+            )?;
+            // trim the query pairs, they might contain credentials or local settings we don't want to send as-is.
+            url.set_query(None);
+            url
+        };
+        Ok(ObjectStoreDirectoryServiceConfig {
+            object_store_url: trimmed_url.into(),
+            object_store_options: url
+                .query_pairs()
+                .into_iter()
+                .map(|(k, v)| (k.to_string(), v.to_string()))
+                .collect(),
+        })
+    }
+}
+
+#[async_trait]
+impl ServiceBuilder for ObjectStoreDirectoryServiceConfig {
+    type Output = dyn DirectoryService;
+    async fn build<'a>(
+        &'a self,
+        _instance_name: &str,
+        _context: &CompositionContext,
+    ) -> Result<Arc<dyn DirectoryService>, Box<dyn std::error::Error + Send + Sync + 'static>> {
+        let (object_store, path) = object_store::parse_url_opts(
+            &self.object_store_url.parse()?,
+            &self.object_store_options,
+        )?;
+        Ok(Arc::new(ObjectStoreDirectoryService {
+            object_store: Arc::new(object_store),
+            base_path: path,
+        }))
+    }
+}
+
 struct ObjectStoreDirectoryPutter {
     object_store: Arc<dyn ObjectStore>,
     base_path: Path,
 
-    directory_validator: Option<ClosureValidator>,
+    directory_validator: Option<DirectoryGraph<LeavesToRootValidator>>,
 }
 
 impl ObjectStoreDirectoryPutter {
@@ -193,11 +253,13 @@ impl ObjectStoreDirectoryPutter {
 #[async_trait]
 impl DirectoryPutter for ObjectStoreDirectoryPutter {
     #[instrument(level = "trace", skip_all, fields(directory.digest=%directory.digest()), err)]
-    async fn put(&mut self, directory: proto::Directory) -> Result<(), Error> {
+    async fn put(&mut self, directory: Directory) -> Result<(), Error> {
         match self.directory_validator {
             None => return Err(Error::StorageError("already closed".to_string())),
             Some(ref mut validator) => {
-                validator.add(directory)?;
+                validator
+                    .add(directory)
+                    .map_err(|e| Error::StorageError(e.to_string()))?;
             }
         }
 
@@ -214,7 +276,11 @@ impl DirectoryPutter for ObjectStoreDirectoryPutter {
         // retrieve the validated directories.
         // It is important that they are in topological order (root first),
         // since that's how we want to retrieve them from the object store in the end.
-        let directories = validator.finalize_root_to_leaves()?;
+        let directories = validator
+            .validate()
+            .map_err(|e| Error::StorageError(e.to_string()))?
+            .drain_root_to_leaves()
+            .collect::<Vec<_>>();
 
         // Get the root digest
         let root_digest = directories
@@ -245,7 +311,7 @@ impl DirectoryPutter for ObjectStoreDirectoryPutter {
 
                 for directory in directories {
                     directories_sink
-                        .send(directory.encode_to_vec().into())
+                        .send(proto::Directory::from(directory).encode_to_vec().into())
                         .await?;
                 }
 
diff --git a/tvix/castore/src/directoryservice/order_validator.rs b/tvix/castore/src/directoryservice/order_validator.rs
new file mode 100644
index 000000000000..973af92e1294
--- /dev/null
+++ b/tvix/castore/src/directoryservice/order_validator.rs
@@ -0,0 +1,188 @@
+use std::collections::HashSet;
+use tracing::warn;
+
+use super::Directory;
+use crate::{B3Digest, Node};
+
+pub trait OrderValidator {
+    /// Update the order validator's state with the directory.
+    /// Returns whether the directory was accepted.
+    fn add_directory(&mut self, directory: &Directory) -> bool;
+}
+
+#[derive(Default)]
+/// Validates that newly introduced directories are already referenced from
+/// the root via existing directories.
+/// Commonly used when _receiving_ a directory closure _from_ a store.
+pub struct RootToLeavesValidator {
+    /// Only used to remember the root node, not for validation
+    expected_digests: HashSet<B3Digest>,
+}
+
+impl RootToLeavesValidator {
+    /// Use to validate the root digest of the closure upon receiving the first
+    /// directory.
+    pub fn new_with_root_digest(root_digest: B3Digest) -> Self {
+        let mut this = Self::default();
+        this.expected_digests.insert(root_digest);
+        this
+    }
+
+    /// Checks if a directory is in-order based on its digest.
+    ///
+    /// Particularly useful when receiving directories in canonical protobuf
+    /// encoding, so that directories not connected to the root can be rejected
+    /// without parsing.
+    ///
+    /// After parsing, the directory must be passed to `add_directory_unchecked`
+    /// to add its children to the list of expected digests.
+    pub fn digest_allowed(&self, digest: &B3Digest) -> bool {
+        self.expected_digests.is_empty() // we don't know the root node; allow any
+            || self.expected_digests.contains(digest)
+    }
+
+    /// Update the order validator's state with the directory
+    pub fn add_directory_unchecked(&mut self, directory: &Directory) {
+        // No initial root was specified and this is the first directory
+        if self.expected_digests.is_empty() {
+            self.expected_digests.insert(directory.digest());
+        }
+
+        // Allow the children to appear next
+        for (_, node) in directory.nodes() {
+            if let Node::Directory { digest, .. } = node {
+                self.expected_digests.insert(digest.clone());
+            }
+        }
+    }
+}
+
+impl OrderValidator for RootToLeavesValidator {
+    fn add_directory(&mut self, directory: &Directory) -> bool {
+        if !self.digest_allowed(&directory.digest()) {
+            return false;
+        }
+        self.add_directory_unchecked(directory);
+        true
+    }
+}
+
+#[derive(Default)]
+/// Validates that newly uploaded directories only reference directories which
+/// have already been introduced.
+/// Commonly used when _uploading_ a directory closure _to_ a store.
+pub struct LeavesToRootValidator {
+    /// This is empty in the beginning, and gets filled as leaves and intermediates are
+    /// inserted
+    allowed_references: HashSet<B3Digest>,
+}
+
+impl OrderValidator for LeavesToRootValidator {
+    fn add_directory(&mut self, directory: &Directory) -> bool {
+        let digest = directory.digest();
+
+        for (_, node) in directory.nodes() {
+            if let Node::Directory {
+                digest: subdir_node_digest,
+                ..
+            } = node
+            {
+                if !self.allowed_references.contains(subdir_node_digest) {
+                    warn!(
+                        directory.digest = %digest,
+                        subdirectory.digest = %subdir_node_digest,
+                        "unexpected directory reference"
+                    );
+                    return false;
+                }
+            }
+        }
+
+        self.allowed_references.insert(digest.clone());
+
+        true
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::{LeavesToRootValidator, RootToLeavesValidator};
+    use crate::directoryservice::order_validator::OrderValidator;
+    use crate::directoryservice::Directory;
+    use crate::fixtures::{DIRECTORY_A, DIRECTORY_B, DIRECTORY_C};
+    use rstest::rstest;
+
+    #[rstest]
+    /// Uploading an empty directory should succeed.
+    #[case::empty_directory(&[&*DIRECTORY_A], false)]
+    /// Uploading A, then B (referring to A) should succeed.
+    #[case::simple_closure(&[&*DIRECTORY_A, &*DIRECTORY_B], false)]
+    /// Uploading A, then A, then C (referring to A twice) should succeed.
+    /// We pretend to be a dumb client not deduping directories.
+    #[case::same_child(&[&*DIRECTORY_A, &*DIRECTORY_A, &*DIRECTORY_C], false)]
+    /// Uploading A, then C (referring to A twice) should succeed.
+    #[case::same_child_dedup(&[&*DIRECTORY_A, &*DIRECTORY_C], false)]
+    /// Uploading A, then C (referring to A twice), then B (itself referring to A) is accepted by
+    /// the order validator alone; only DirectoryGraph::validate() would catch B being unconnected.
+    #[case::unconnected_node(&[&*DIRECTORY_A, &*DIRECTORY_C, &*DIRECTORY_B], false)]
+    /// Uploading B (referring to A) should fail immediately, because A was never uploaded.
+    #[case::dangling_pointer(&[&*DIRECTORY_B], true)]
+    fn leaves_to_root(
+        #[case] directories_to_upload: &[&Directory],
+        #[case] exp_fail_upload_last: bool,
+    ) {
+        let mut validator = LeavesToRootValidator::default();
+        let len_directories_to_upload = directories_to_upload.len();
+
+        for (i, d) in directories_to_upload.iter().enumerate() {
+            let resp = validator.add_directory(d);
+            if i == len_directories_to_upload - 1 && exp_fail_upload_last {
+                assert!(!resp, "expect last put to fail");
+
+                // We don't really care anymore what finalize() would return, as
+                // the add() failed.
+                return;
+            } else {
+                assert!(resp, "expect put to succeed");
+            }
+        }
+    }
+
+    #[rstest]
+    /// Downloading an empty directory should succeed.
+    #[case::empty_directory(&*DIRECTORY_A, &[&*DIRECTORY_A], false)]
+    /// Downloading B, then A (referenced by B) should succeed.
+    #[case::simple_closure(&*DIRECTORY_B, &[&*DIRECTORY_B, &*DIRECTORY_A], false)]
+    /// Downloading C (referring to A twice), then A should succeed.
+    #[case::same_child_dedup(&*DIRECTORY_C, &[&*DIRECTORY_C, &*DIRECTORY_A], false)]
+    /// Downloading C, then B (both referring to A but not to each other) should fail immediately, as B has no connection to C (the root).
+    #[case::unconnected_node(&*DIRECTORY_C, &[&*DIRECTORY_C, &*DIRECTORY_B], true)]
+    /// Downloading B (specified as the root) but receiving A instead should fail immediately, because A has no connection to B (the root).
+    #[case::dangling_pointer(&*DIRECTORY_B, &[&*DIRECTORY_A], true)]
+    fn root_to_leaves(
+        #[case] root: &Directory,
+        #[case] directories_to_upload: &[&Directory],
+        #[case] exp_fail_upload_last: bool,
+    ) {
+        let mut validator = RootToLeavesValidator::new_with_root_digest(root.digest());
+        let len_directories_to_upload = directories_to_upload.len();
+
+        for (i, d) in directories_to_upload.iter().enumerate() {
+            let resp1 = validator.digest_allowed(&d.digest());
+            let resp = validator.add_directory(d);
+            assert_eq!(
+                resp1, resp,
+                "digest_allowed should return the same value as add_directory"
+            );
+            if i == len_directories_to_upload - 1 && exp_fail_upload_last {
+                assert!(!resp, "expect last put to fail");
+
+                // We don't really care anymore what finalize() would return, as
+                // the add() failed.
+                return;
+            } else {
+                assert!(resp, "expect put to succeed");
+            }
+        }
+    }
+}
diff --git a/tvix/castore/src/directoryservice/redb.rs b/tvix/castore/src/directoryservice/redb.rs
new file mode 100644
index 000000000000..d253df503bb3
--- /dev/null
+++ b/tvix/castore/src/directoryservice/redb.rs
@@ -0,0 +1,303 @@
+use futures::stream::BoxStream;
+use prost::Message;
+use redb::{Database, TableDefinition};
+use std::{path::PathBuf, sync::Arc};
+use tonic::async_trait;
+use tracing::{instrument, warn};
+
+use super::{
+    traverse_directory, Directory, DirectoryGraph, DirectoryPutter, DirectoryService,
+    LeavesToRootValidator,
+};
+use crate::{
+    composition::{CompositionContext, ServiceBuilder},
+    digests, proto, B3Digest, Error,
+};
+
+const DIRECTORY_TABLE: TableDefinition<[u8; digests::B3_LEN], Vec<u8>> =
+    TableDefinition::new("directory");
+
+#[derive(Clone)]
+pub struct RedbDirectoryService {
+    // We wrap the db in an Arc to be able to move it into spawn_blocking,
+    // as discussed in https://github.com/cberner/redb/issues/789
+    db: Arc<Database>,
+}
+
+impl RedbDirectoryService {
+    /// Constructs a new instance using the specified filesystem path for
+    /// storage.
+    pub async fn new(path: PathBuf) -> Result<Self, Error> {
+        if path == PathBuf::from("/") {
+            return Err(Error::StorageError(
+                "cowardly refusing to open / with redb".to_string(),
+            ));
+        }
+
+        let db = tokio::task::spawn_blocking(|| -> Result<_, redb::Error> {
+            let db = redb::Database::create(path)?;
+            create_schema(&db)?;
+            Ok(db)
+        })
+        .await??;
+
+        Ok(Self { db: Arc::new(db) })
+    }
+
+    /// Constructs a new instance using the in-memory backend.
+    pub fn new_temporary() -> Result<Self, Error> {
+        let db =
+            redb::Database::builder().create_with_backend(redb::backends::InMemoryBackend::new())?;
+
+        create_schema(&db)?;
+
+        Ok(Self { db: Arc::new(db) })
+    }
+}
+
+/// Ensures all tables are present.
+/// Opens a write transaction and calls open_table on DIRECTORY_TABLE, which will
+/// create it if not present.
+fn create_schema(db: &redb::Database) -> Result<(), redb::Error> {
+    let txn = db.begin_write()?;
+    txn.open_table(DIRECTORY_TABLE)?;
+    txn.commit()?;
+
+    Ok(())
+}
+
+#[async_trait]
+impl DirectoryService for RedbDirectoryService {
+    #[instrument(skip(self, digest), fields(directory.digest = %digest))]
+    async fn get(&self, digest: &B3Digest) -> Result<Option<Directory>, Error> {
+        let db = self.db.clone();
+
+        // Retrieves the protobuf-encoded Directory for the corresponding digest.
+        let db_get_resp = tokio::task::spawn_blocking({
+            let digest_as_array: [u8; digests::B3_LEN] = digest.to_owned().into();
+            move || -> Result<_, redb::Error> {
+                let txn = db.begin_read()?;
+                let table = txn.open_table(DIRECTORY_TABLE)?;
+                Ok(table.get(digest_as_array)?)
+            }
+        })
+        .await?
+        .map_err(|e| {
+            warn!(err=%e, "failed to retrieve Directory");
+            Error::StorageError("failed to retrieve Directory".to_string())
+        })?;
+
+        // The Directory was not found, return None.
+        let directory_data = match db_get_resp {
+            None => return Ok(None),
+            Some(d) => d,
+        };
+
+        // We check that the digest of the retrieved Directory matches the expected digest.
+        let actual_digest = blake3::hash(directory_data.value().as_slice());
+        if actual_digest.as_bytes() != digest.as_slice() {
+            warn!(directory.actual_digest=%actual_digest, "requested Directory got the wrong digest");
+            return Err(Error::StorageError(
+                "requested Directory got the wrong digest".to_string(),
+            ));
+        }
+
+        // Attempt to decode the retrieved protobuf-encoded Directory, returning a parsing error if
+        // the decoding failed.
+        let directory = match proto::Directory::decode(&*directory_data.value()) {
+            Ok(dir) => {
+                // The returned Directory must be valid.
+                dir.try_into().map_err(|e| {
+                    warn!(err=%e, "Directory failed validation");
+                    Error::StorageError("Directory failed validation".to_string())
+                })?
+            }
+            Err(e) => {
+                warn!(err=%e, "failed to parse Directory");
+                return Err(Error::StorageError("failed to parse Directory".to_string()));
+            }
+        };
+
+        Ok(Some(directory))
+    }
+
+    #[instrument(skip(self, directory), fields(directory.digest = %directory.digest()))]
+    async fn put(&self, directory: Directory) -> Result<B3Digest, Error> {
+        tokio::task::spawn_blocking({
+            let db = self.db.clone();
+            move || {
+                let digest = directory.digest();
+
+                // Store the directory in the table.
+                let txn = db.begin_write()?;
+                {
+                    let mut table = txn.open_table(DIRECTORY_TABLE)?;
+                    let digest_as_array: [u8; digests::B3_LEN] = digest.clone().into();
+                    table.insert(
+                        digest_as_array,
+                        proto::Directory::from(directory).encode_to_vec(),
+                    )?;
+                }
+                txn.commit()?;
+
+                Ok(digest)
+            }
+        })
+        .await?
+    }
+
+    #[instrument(skip_all, fields(directory.digest = %root_directory_digest))]
+    fn get_recursive(
+        &self,
+        root_directory_digest: &B3Digest,
+    ) -> BoxStream<'static, Result<Directory, Error>> {
+        // FUTUREWORK: Ideally we should have all of the directory traversing happen in a single
+        // redb transaction to avoid constantly closing and opening new transactions for the
+        // database.
+        traverse_directory(self.clone(), root_directory_digest)
+    }
+
+    #[instrument(skip_all)]
+    fn put_multiple_start(&self) -> Box<dyn DirectoryPutter> {
+        Box::new(RedbDirectoryPutter {
+            db: self.db.clone(),
+            directory_validator: Some(Default::default()),
+        })
+    }
+}
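+
+// Illustrative upload flow (a sketch, not part of this change), assuming
+// `service` is a RedbDirectoryService and `leaf`/`root` are Directories:
+//
+//     let mut putter = service.put_multiple_start();
+//     putter.put(leaf).await?;                  // leaves first
+//     putter.put(root).await?;                  // then their parents
+//     let root_digest = putter.close().await?;  // validates, then batch-commits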
+
+pub struct RedbDirectoryPutter {
+    db: Arc<Database>,
+
+    /// The directories (inside the directory validator) that we insert later,
+    /// or None if they were already inserted.
+    directory_validator: Option<DirectoryGraph<LeavesToRootValidator>>,
+}
+
+#[async_trait]
+impl DirectoryPutter for RedbDirectoryPutter {
+    #[instrument(level = "trace", skip_all, fields(directory.digest=%directory.digest()), err)]
+    async fn put(&mut self, directory: Directory) -> Result<(), Error> {
+        match self.directory_validator {
+            None => return Err(Error::StorageError("already closed".to_string())),
+            Some(ref mut validator) => {
+                validator
+                    .add(directory)
+                    .map_err(|e| Error::StorageError(e.to_string()))?;
+            }
+        }
+
+        Ok(())
+    }
+
+    #[instrument(level = "trace", skip_all, ret, err)]
+    async fn close(&mut self) -> Result<B3Digest, Error> {
+        match self.directory_validator.take() {
+            None => Err(Error::StorageError("already closed".to_string())),
+            Some(validator) => {
+                // Insert all directories as a batch.
+                tokio::task::spawn_blocking({
+                    let txn = self.db.begin_write()?;
+                    move || {
+                        // Retrieve the validated directories.
+                        let directories = validator
+                            .validate()
+                            .map_err(|e| Error::StorageError(e.to_string()))?
+                            .drain_leaves_to_root()
+                            .collect::<Vec<_>>();
+
+                        // Get the root digest, which is at the end (cf. insertion order)
+                        let root_digest = directories
+                            .last()
+                            .ok_or_else(|| Error::StorageError("got no directories".to_string()))?
+                            .digest();
+
+                        {
+                            let mut table = txn.open_table(DIRECTORY_TABLE)?;
+
+                            // Looping over all the verified directories, queuing them up for a
+                            // batch insertion.
+                            for directory in directories {
+                                let digest_as_array: [u8; digests::B3_LEN] =
+                                    directory.digest().into();
+                                table.insert(
+                                    digest_as_array,
+                                    proto::Directory::from(directory).encode_to_vec(),
+                                )?;
+                            }
+                        }
+
+                        txn.commit()?;
+
+                        Ok(root_digest)
+                    }
+                })
+                .await?
+            }
+        }
+    }
+}
+
+#[derive(serde::Deserialize)]
+#[serde(deny_unknown_fields)]
+pub struct RedbDirectoryServiceConfig {
+    is_temporary: bool,
+    #[serde(default)]
+    /// required when is_temporary = false
+    path: Option<PathBuf>,
+}
+
+impl TryFrom<url::Url> for RedbDirectoryServiceConfig {
+    type Error = Box<dyn std::error::Error + Send + Sync>;
+    fn try_from(url: url::Url) -> Result<Self, Self::Error> {
+        // redb doesn't support a host in the URL; a path can be provided
+        // (otherwise the database lives in memory only).
+        if url.has_host() {
+            return Err(Error::StorageError("no host allowed".to_string()).into());
+        }
+
+        Ok(if url.path().is_empty() {
+            RedbDirectoryServiceConfig {
+                is_temporary: true,
+                path: None,
+            }
+        } else {
+            RedbDirectoryServiceConfig {
+                is_temporary: false,
+                path: Some(url.path().into()),
+            }
+        })
+    }
+}
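+
+// Illustrative URL forms (a sketch, not part of this change):
+//
+//     redb://                     -> { is_temporary: true,  path: None }
+//     redb:///some/path/dirs.redb -> { is_temporary: false, path: Some("/some/path/dirs.redb") }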
+
+#[async_trait]
+impl ServiceBuilder for RedbDirectoryServiceConfig {
+    type Output = dyn DirectoryService;
+    async fn build<'a>(
+        &'a self,
+        _instance_name: &str,
+        _context: &CompositionContext,
+    ) -> Result<Arc<dyn DirectoryService>, Box<dyn std::error::Error + Send + Sync + 'static>> {
+        match self {
+            RedbDirectoryServiceConfig {
+                is_temporary: true,
+                path: None,
+            } => Ok(Arc::new(RedbDirectoryService::new_temporary()?)),
+            RedbDirectoryServiceConfig {
+                is_temporary: true,
+                path: Some(_),
+            } => Err(Error::StorageError(
+                "Temporary RedbDirectoryService can not have path".into(),
+            )
+            .into()),
+            RedbDirectoryServiceConfig {
+                is_temporary: false,
+                path: None,
+            } => Err(Error::StorageError("RedbDirectoryService is missing path".into()).into()),
+            RedbDirectoryServiceConfig {
+                is_temporary: false,
+                path: Some(path),
+            } => Ok(Arc::new(RedbDirectoryService::new(path.into()).await?)),
+        }
+    }
+}
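+
+// Illustrative wiring (a sketch, not part of this change): composition parses
+// the URL into a config, then builds the service. Roughly:
+//
+//     let config = RedbDirectoryServiceConfig::try_from(url)?;
+//     // ("default" is a hypothetical instance name)
+//     let service = config.build("default", &context).await?;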
diff --git a/tvix/castore/src/directoryservice/simple_putter.rs b/tvix/castore/src/directoryservice/simple_putter.rs
index 25617ebcac82..b4daaee61b22 100644
--- a/tvix/castore/src/directoryservice/simple_putter.rs
+++ b/tvix/castore/src/directoryservice/simple_putter.rs
@@ -1,7 +1,6 @@
-use super::ClosureValidator;
 use super::DirectoryPutter;
 use super::DirectoryService;
-use crate::proto;
+use super::{Directory, DirectoryGraph, LeavesToRootValidator};
 use crate::B3Digest;
 use crate::Error;
 use tonic::async_trait;
@@ -14,7 +13,7 @@ use tracing::warn;
 pub struct SimplePutter<DS: DirectoryService> {
     directory_service: DS,
 
-    directory_validator: Option<ClosureValidator>,
+    directory_validator: Option<DirectoryGraph<LeavesToRootValidator>>,
 }
 
 impl<DS: DirectoryService> SimplePutter<DS> {
@@ -29,11 +28,13 @@ impl<DS: DirectoryService> SimplePutter<DS> {
 #[async_trait]
 impl<DS: DirectoryService + 'static> DirectoryPutter for SimplePutter<DS> {
     #[instrument(level = "trace", skip_all, fields(directory.digest=%directory.digest()), err)]
-    async fn put(&mut self, directory: proto::Directory) -> Result<(), Error> {
+    async fn put(&mut self, directory: Directory) -> Result<(), Error> {
         match self.directory_validator {
             None => return Err(Error::StorageError("already closed".to_string())),
             Some(ref mut validator) => {
-                validator.add(directory)?;
+                validator
+                    .add(directory)
+                    .map_err(|e| Error::StorageError(e.to_string()))?;
             }
         }
 
@@ -46,7 +47,11 @@ impl<DS: DirectoryService + 'static> DirectoryPutter for SimplePutter<DS> {
             None => Err(Error::InvalidRequest("already closed".to_string())),
             Some(validator) => {
                 // retrieve the validated directories.
-                let directories = validator.finalize()?;
+                let directories = validator
+                    .validate()
+                    .map_err(|e| Error::StorageError(e.to_string()))?
+                    .drain_leaves_to_root()
+                    .collect::<Vec<_>>();
 
                 // Get the root digest, which is at the end (cf. insertion order)
                 let root_digest = directories
diff --git a/tvix/castore/src/directoryservice/sled.rs b/tvix/castore/src/directoryservice/sled.rs
index 9490a49c00ae..4f3a860d14e4 100644
--- a/tvix/castore/src/directoryservice/sled.rs
+++ b/tvix/castore/src/directoryservice/sled.rs
@@ -1,14 +1,15 @@
-use crate::proto::Directory;
-use crate::{proto, B3Digest, Error};
 use futures::stream::BoxStream;
 use prost::Message;
 use std::ops::Deref;
 use std::path::Path;
+use std::sync::Arc;
 use tonic::async_trait;
 use tracing::{instrument, warn};
 
 use super::utils::traverse_directory;
-use super::{ClosureValidator, DirectoryPutter, DirectoryService};
+use super::{Directory, DirectoryGraph, DirectoryPutter, DirectoryService, LeavesToRootValidator};
+use crate::composition::{CompositionContext, ServiceBuilder};
+use crate::{proto, B3Digest, Error};
 
 #[derive(Clone)]
 pub struct SledDirectoryService {
@@ -17,6 +18,12 @@ pub struct SledDirectoryService {
 
 impl SledDirectoryService {
     pub fn new<P: AsRef<Path>>(p: P) -> Result<Self, sled::Error> {
+        if p.as_ref() == Path::new("/") {
+            return Err(sled::Error::Unsupported(
+                "cowardly refusing to open / with sled".to_string(),
+            ));
+        }
+
         let config = sled::Config::default()
             .use_compression(false) // is a required parameter
             .path(p);
@@ -36,7 +43,7 @@ impl SledDirectoryService {
 #[async_trait]
 impl DirectoryService for SledDirectoryService {
     #[instrument(skip(self, digest), fields(directory.digest = %digest))]
-    async fn get(&self, digest: &B3Digest) -> Result<Option<proto::Directory>, Error> {
+    async fn get(&self, digest: &B3Digest) -> Result<Option<Directory>, Error> {
         let resp = tokio::task::spawn_blocking({
             let db = self.db.clone();
             let digest = digest.clone();
@@ -53,7 +60,7 @@ impl DirectoryService for SledDirectoryService {
             None => Ok(None),
 
             // The directory was found, try to parse the data as Directory message
-            Some(data) => match Directory::decode(&*data) {
+            Some(data) => match proto::Directory::decode(&*data) {
                 Ok(directory) => {
                     // Validate the retrieved Directory indeed has the
                     // digest we expect it to have, to detect corruptions.
@@ -65,14 +72,10 @@ impl DirectoryService for SledDirectoryService {
                         )));
                     }
 
-                    // Validate the Directory itself is valid.
-                    if let Err(e) = directory.validate() {
-                        warn!("directory failed validation: {}", e.to_string());
-                        return Err(Error::StorageError(format!(
-                            "directory {} failed validation: {}",
-                            actual_digest, e,
-                        )));
-                    }
+                    let directory = directory.try_into().map_err(|e| {
+                        warn!("failed to retrieve directory: {}", e);
+                        Error::StorageError(format!("failed to retrieve directory: {}", e))
+                    })?;
 
                     Ok(Some(directory))
                 }
@@ -85,22 +88,18 @@ impl DirectoryService for SledDirectoryService {
     }
 
     #[instrument(skip(self, directory), fields(directory.digest = %directory.digest()))]
-    async fn put(&self, directory: proto::Directory) -> Result<B3Digest, Error> {
+    async fn put(&self, directory: Directory) -> Result<B3Digest, Error> {
         tokio::task::spawn_blocking({
             let db = self.db.clone();
             move || {
                 let digest = directory.digest();
 
-                // validate the directory itself.
-                if let Err(e) = directory.validate() {
-                    return Err(Error::InvalidRequest(format!(
-                        "directory {} failed validation: {}",
-                        digest, e,
-                    )));
-                }
                 // store it
-                db.insert(digest.as_slice(), directory.encode_to_vec())
-                    .map_err(|e| Error::StorageError(e.to_string()))?;
+                db.insert(
+                    digest.as_slice(),
+                    proto::Directory::from(directory).encode_to_vec(),
+                )
+                .map_err(|e| Error::StorageError(e.to_string()))?;
 
                 Ok(digest)
             }
@@ -112,7 +111,7 @@ impl DirectoryService for SledDirectoryService {
     fn get_recursive(
         &self,
         root_directory_digest: &B3Digest,
-    ) -> BoxStream<'static, Result<proto::Directory, Error>> {
+    ) -> BoxStream<'static, Result<Directory, Error>> {
         traverse_directory(self.clone(), root_directory_digest)
     }
 
@@ -128,6 +127,72 @@ impl DirectoryService for SledDirectoryService {
     }
 }
 
+#[derive(serde::Deserialize)]
+#[serde(deny_unknown_fields)]
+pub struct SledDirectoryServiceConfig {
+    is_temporary: bool,
+    #[serde(default)]
+    /// required when is_temporary = false
+    path: Option<String>,
+}
+
+impl TryFrom<url::Url> for SledDirectoryServiceConfig {
+    type Error = Box<dyn std::error::Error + Send + Sync>;
+    fn try_from(url: url::Url) -> Result<Self, Self::Error> {
+        // sled doesn't support a host in the URL; a path can be provided
+        // (otherwise the database lives in memory only).
+        if url.has_host() {
+            return Err(Error::StorageError("no host allowed".to_string()).into());
+        }
+
+        // TODO: expose compression and other parameters as URL parameters?
+
+        Ok(if url.path().is_empty() {
+            SledDirectoryServiceConfig {
+                is_temporary: true,
+                path: None,
+            }
+        } else {
+            SledDirectoryServiceConfig {
+                is_temporary: false,
+                path: Some(url.path().to_string()),
+            }
+        })
+    }
+}
+
+#[async_trait]
+impl ServiceBuilder for SledDirectoryServiceConfig {
+    type Output = dyn DirectoryService;
+    async fn build<'a>(
+        &'a self,
+        _instance_name: &str,
+        _context: &CompositionContext,
+    ) -> Result<Arc<dyn DirectoryService>, Box<dyn std::error::Error + Send + Sync + 'static>> {
+        match self {
+            SledDirectoryServiceConfig {
+                is_temporary: true,
+                path: None,
+            } => Ok(Arc::new(SledDirectoryService::new_temporary()?)),
+            SledDirectoryServiceConfig {
+                is_temporary: true,
+                path: Some(_),
+            } => Err(Error::StorageError(
+                "Temporary SledDirectoryService can not have path".into(),
+            )
+            .into()),
+            SledDirectoryServiceConfig {
+                is_temporary: false,
+                path: None,
+            } => Err(Error::StorageError("SledDirectoryService is missing path".into()).into()),
+            SledDirectoryServiceConfig {
+                is_temporary: false,
+                path: Some(path),
+            } => Ok(Arc::new(SledDirectoryService::new(path)?)),
+        }
+    }
+}
+
 /// Buffers Directory messages to be uploaded and inserts them in a batch
 /// transaction on close.
 pub struct SledDirectoryPutter {
@@ -135,17 +200,19 @@ pub struct SledDirectoryPutter {
 
     /// The directories (inside the directory validator) that we insert later,
     /// or None, if they were already inserted.
-    directory_validator: Option<ClosureValidator>,
+    directory_validator: Option<DirectoryGraph<LeavesToRootValidator>>,
 }
 
 #[async_trait]
 impl DirectoryPutter for SledDirectoryPutter {
     #[instrument(level = "trace", skip_all, fields(directory.digest=%directory.digest()), err)]
-    async fn put(&mut self, directory: proto::Directory) -> Result<(), Error> {
+    async fn put(&mut self, directory: Directory) -> Result<(), Error> {
         match self.directory_validator {
             None => return Err(Error::StorageError("already closed".to_string())),
             Some(ref mut validator) => {
-                validator.add(directory)?;
+                validator
+                    .add(directory)
+                    .map_err(|e| Error::StorageError(e.to_string()))?;
             }
         }
 
@@ -162,7 +229,11 @@ impl DirectoryPutter for SledDirectoryPutter {
                     let tree = self.tree.clone();
                     move || {
                         // retrieve the validated directories.
-                        let directories = validator.finalize()?;
+                        let directories = validator
+                            .validate()
+                            .map_err(|e| Error::StorageError(e.to_string()))?
+                            .drain_leaves_to_root()
+                            .collect::<Vec<_>>();
 
                         // Get the root digest, which is at the end (cf. insertion order)
                         let root_digest = directories
@@ -172,7 +243,10 @@ impl DirectoryPutter for SledDirectoryPutter {
 
                         let mut batch = sled::Batch::default();
                         for directory in directories {
-                            batch.insert(directory.digest().as_slice(), directory.encode_to_vec());
+                            batch.insert(
+                                directory.digest().as_slice(),
+                                proto::Directory::from(directory).encode_to_vec(),
+                            );
                         }
 
                         tree.apply_batch(batch).map_err(|e| {
diff --git a/tvix/castore/src/directoryservice/tests/mod.rs b/tvix/castore/src/directoryservice/tests/mod.rs
index cc3c5b788a2c..ad189564bfe7 100644
--- a/tvix/castore/src/directoryservice/tests/mod.rs
+++ b/tvix/castore/src/directoryservice/tests/mod.rs
@@ -8,10 +8,8 @@ use rstest_reuse::{self, *};
 
 use super::DirectoryService;
 use crate::directoryservice;
-use crate::{
-    fixtures::{DIRECTORY_A, DIRECTORY_B, DIRECTORY_C},
-    proto::{self, Directory},
-};
+use crate::fixtures::{DIRECTORY_A, DIRECTORY_B, DIRECTORY_C, DIRECTORY_D};
+use crate::{Directory, Node};
 
 mod utils;
 use self::utils::make_grpc_directory_service_client;
@@ -26,16 +24,27 @@ use self::utils::make_grpc_directory_service_client;
 #[case::grpc(make_grpc_directory_service_client().await)]
 #[case::memory(directoryservice::from_addr("memory://").await.unwrap())]
 #[case::sled(directoryservice::from_addr("sled://").await.unwrap())]
+#[case::redb(directoryservice::from_addr("redb://").await.unwrap())]
 #[case::objectstore(directoryservice::from_addr("objectstore+memory://").await.unwrap())]
 #[cfg_attr(all(feature = "cloud", feature = "integration"), case::bigtable(directoryservice::from_addr("bigtable://instance-1?project_id=project-1&table_name=table-1&family_name=cf1").await.unwrap()))]
 pub fn directory_services(#[case] directory_service: impl DirectoryService) {}
 
-/// Ensures asking for a directory that doesn't exist returns a Ok(None).
+/// Ensures asking for a directory that doesn't exist returns Ok(None), and that
+/// get_recursive returns an empty stream.
 #[apply(directory_services)]
 #[tokio::test]
 async fn test_non_exist(directory_service: impl DirectoryService) {
-    let resp = directory_service.get(&DIRECTORY_A.digest()).await;
-    assert!(resp.unwrap().is_none())
+    // single get
+    assert_eq!(Ok(None), directory_service.get(&DIRECTORY_A.digest()).await);
+
+    // recursive get
+    assert_eq!(
+        Vec::<Result<Directory, crate::Error>>::new(),
+        directory_service
+            .get_recursive(&DIRECTORY_A.digest())
+            .collect::<Vec<Result<Directory, crate::Error>>>()
+            .await
+    );
 }
 
 /// Putting a single directory into the store, and then getting it out both via
@@ -123,6 +132,46 @@ async fn put_get_multiple_dedup(directory_service: impl DirectoryService) {
     )
 }
 
+/// This tests the insertion and retrieval of a closure which references the same
+/// directory twice (DIRECTORY_A, an empty directory): once from the root, and
+/// once from a subdirectory.
+#[apply(directory_services)]
+#[tokio::test]
+async fn put_get_foo(directory_service: impl DirectoryService) {
+    let mut handle = directory_service.put_multiple_start();
+    handle.put(DIRECTORY_A.clone()).await.unwrap();
+    handle.put(DIRECTORY_B.clone()).await.unwrap();
+    handle.put(DIRECTORY_D.clone()).await.unwrap();
+    let root_digest = handle.close().await.unwrap();
+    assert_eq!(
+        DIRECTORY_D.digest(),
+        root_digest,
+        "root digest should match"
+    );
+
+    // Ensure we can get the closure back out of the service, and it is returned in a valid order
+    // (there are multiple valid possibilities)
+    let retrieved_closure = directory_service
+        .get_recursive(&DIRECTORY_D.digest())
+        .collect::<Vec<_>>()
+        .await;
+
+    let valid_closures = [
+        vec![
+            Ok(DIRECTORY_D.clone()),
+            Ok(DIRECTORY_B.clone()),
+            Ok(DIRECTORY_A.clone()),
+        ],
+        vec![
+            Ok(DIRECTORY_D.clone()),
+            Ok(DIRECTORY_A.clone()),
+            Ok(DIRECTORY_B.clone()),
+        ],
+    ];
+    if !valid_closures.contains(&retrieved_closure) {
+        panic!("invalid closure returned: {:?}", retrieved_closure);
+    }
+}
+
 /// Uploading A, then C (referring to A twice), then B (itself referring to A) should fail during close,
 /// as B itself would be left unconnected.
 #[apply(directory_services)]
@@ -161,58 +210,20 @@ async fn upload_reject_dangling_pointer(directory_service: impl DirectoryService
     }
 }
 
-/// Try uploading a Directory failing its internal validation, ensure it gets
-/// rejected.
-#[apply(directory_services)]
-#[tokio::test]
-async fn upload_reject_failing_validation(directory_service: impl DirectoryService) {
-    let broken_directory = Directory {
-        symlinks: vec![proto::SymlinkNode {
-            name: "".into(), // wrong!
-            target: "doesntmatter".into(),
-        }],
-        ..Default::default()
-    };
-    assert!(broken_directory.validate().is_err());
-
-    // Try to upload via single upload.
-    assert!(
-        directory_service
-            .put(broken_directory.clone())
-            .await
-            .is_err(),
-        "single upload must fail"
-    );
-
-    // Try to upload via put_multiple. We're a bit more permissive here, the
-    // intermediate .put() might succeed, due to client-side bursting (in the
-    // case of gRPC), but then the close MUST fail.
-    let mut handle = directory_service.put_multiple_start();
-    if handle.put(broken_directory).await.is_ok() {
-        assert!(
-            handle.close().await.is_err(),
-            "when succeeding put, close must fail"
-        )
-    }
-}
-
 /// Try uploading a Directory that refers to a previously-uploaded directory.
 /// Both pass their isolated validation, but the size field in the parent is wrong.
 /// This should be rejected.
 #[apply(directory_services)]
 #[tokio::test]
 async fn upload_reject_wrong_size(directory_service: impl DirectoryService) {
-    let wrong_parent_directory = Directory {
-        directories: vec![proto::DirectoryNode {
-            name: "foo".into(),
-            digest: DIRECTORY_A.digest().into(),
+    let wrong_parent_directory = Directory::try_from_iter([(
+        "foo".try_into().unwrap(),
+        Node::Directory {
+            digest: DIRECTORY_A.digest(),
             size: DIRECTORY_A.size() + 42, // wrong!
-        }],
-        ..Default::default()
-    };
-
-    // Make sure isolated validation itself is ok
-    assert!(wrong_parent_directory.validate().is_ok());
+        },
+    )])
+    .unwrap();
 
     // Now upload both. Ensure it either fails during the second put, or during
     // the close.
diff --git a/tvix/castore/src/directoryservice/tests/utils.rs b/tvix/castore/src/directoryservice/tests/utils.rs
index 0f706695eec8..3d245ea412d5 100644
--- a/tvix/castore/src/directoryservice/tests/utils.rs
+++ b/tvix/castore/src/directoryservice/tests/utils.rs
@@ -6,6 +6,7 @@ use crate::{
     proto::directory_service_server::DirectoryServiceServer,
 };
 
+use hyper_util::rt::TokioIo;
 use tonic::transport::{Endpoint, Server, Uri};
 
 /// Constructs and returns a gRPC DirectoryService.
@@ -37,7 +38,7 @@ pub async fn make_grpc_directory_service_client() -> Box<dyn DirectoryService> {
                 .unwrap()
                 .connect_with_connector(tower::service_fn(move |_: Uri| {
                     let right = maybe_right.take().unwrap();
-                    async move { Ok::<_, std::io::Error>(right) }
+                    async move { Ok::<_, std::io::Error>(TokioIo::new(right)) }
                 }))
                 .await
                 .unwrap(),
diff --git a/tvix/castore/src/directoryservice/traverse.rs b/tvix/castore/src/directoryservice/traverse.rs
index 17a51ae2bbff..0bd67e9bcf1f 100644
--- a/tvix/castore/src/directoryservice/traverse.rs
+++ b/tvix/castore/src/directoryservice/traverse.rs
@@ -1,8 +1,4 @@
-use super::DirectoryService;
-use crate::{
-    proto::{node::Node, NamedNode},
-    B3Digest, Error, Path,
-};
+use crate::{directoryservice::DirectoryService, Error, Node, Path};
 use tracing::{instrument, warn};
 
 /// This descends from a (root) node to the given (sub)path, returning the Node
@@ -17,19 +13,14 @@ where
     DS: AsRef<dyn DirectoryService>,
 {
     let mut parent_node = root_node;
-    for component in path.as_ref().components() {
+    for component in path.as_ref().components_bytes() {
         match parent_node {
-            Node::File(_) | Node::Symlink(_) => {
+            Node::File { .. } | Node::Symlink { .. } => {
                 // There's still some path left, but the parent node is no directory.
                 // This means the path doesn't exist, as we can't reach it.
                 return Ok(None);
             }
-            Node::Directory(directory_node) => {
-                let digest: B3Digest = directory_node
-                    .digest
-                    .try_into()
-                    .map_err(|_e| Error::StorageError("invalid digest length".to_string()))?;
-
+            Node::Directory { digest, .. } => {
                 // fetch the linked node from the directory_service.
                 let directory =
                     directory_service
@@ -44,15 +35,16 @@ where
                         })?;
 
                 // look for the component in the [Directory].
-                // FUTUREWORK: as the nodes() iterator returns in a sorted fashion, we
-                // could stop as soon as e.name is larger than the search string.
-                if let Some(child_node) = directory.nodes().find(|n| n.get_name() == component) {
+                if let Some((_child_name, child_node)) = directory
+                    .into_nodes()
+                    .find(|(name, _node)| name.as_ref() == component)
+                {
                     // child node found, update prev_node to that and continue.
-                    parent_node = child_node;
+                    parent_node = child_node.clone();
                 } else {
                     // child node not found means there's no such element inside the directory.
                     return Ok(None);
-                }
+                };
             }
         }
     }
@@ -65,8 +57,8 @@ where
 mod tests {
     use crate::{
         directoryservice,
-        fixtures::{DIRECTORY_COMPLICATED, DIRECTORY_WITH_KEEP},
-        PathBuf,
+        fixtures::{DIRECTORY_COMPLICATED, DIRECTORY_WITH_KEEP, EMPTY_BLOB_DIGEST},
+        Node, PathBuf,
     };
 
     use super::descend_to;
@@ -88,21 +80,23 @@ mod tests {
         handle.close().await.expect("must upload");
 
         // construct the node for DIRECTORY_COMPLICATED
-        let node_directory_complicated =
-            crate::proto::node::Node::Directory(crate::proto::DirectoryNode {
-                name: "doesntmatter".into(),
-                digest: DIRECTORY_COMPLICATED.digest().into(),
-                size: DIRECTORY_COMPLICATED.size(),
-            });
+        let node_directory_complicated = Node::Directory {
+            digest: DIRECTORY_COMPLICATED.digest(),
+            size: DIRECTORY_COMPLICATED.size(),
+        };
 
         // construct the node for DIRECTORY_WITH_KEEP
-        let node_directory_with_keep = crate::proto::node::Node::Directory(
-            DIRECTORY_COMPLICATED.directories.first().unwrap().clone(),
-        );
+        let node_directory_with_keep = Node::Directory {
+            digest: DIRECTORY_WITH_KEEP.digest(),
+            size: DIRECTORY_WITH_KEEP.size(),
+        };
 
         // construct the node for the .keep file
-        let node_file_keep =
-            crate::proto::node::Node::File(DIRECTORY_WITH_KEEP.files.first().unwrap().clone());
+        let node_file_keep = Node::File {
+            digest: EMPTY_BLOB_DIGEST.clone(),
+            size: 0,
+            executable: false,
+        };
 
         // traversal to an empty subpath should return the root node.
         {
diff --git a/tvix/castore/src/directoryservice/utils.rs b/tvix/castore/src/directoryservice/utils.rs
index a0ba395ecda8..d073c2c3c8ec 100644
--- a/tvix/castore/src/directoryservice/utils.rs
+++ b/tvix/castore/src/directoryservice/utils.rs
@@ -1,21 +1,22 @@
+use super::Directory;
 use super::DirectoryService;
-use crate::proto;
 use crate::B3Digest;
 use crate::Error;
+use crate::Node;
 use async_stream::try_stream;
 use futures::stream::BoxStream;
 use std::collections::{HashSet, VecDeque};
 use tracing::instrument;
 use tracing::warn;
 
-/// Traverses a [proto::Directory] from the root to the children.
+/// Traverses a [Directory] from the root to the children.
 ///
 /// This is mostly BFS, but directories are only returned once.
 #[instrument(skip(directory_service))]
 pub fn traverse_directory<'a, DS: DirectoryService + 'static>(
     directory_service: DS,
     root_directory_digest: &B3Digest,
-) -> BoxStream<'a, Result<proto::Directory, Error>> {
+) -> BoxStream<'a, Result<Directory, Error>> {
     // The list of all directories that still need to be traversed. The next
     // element is picked from the front, new elements are enqueued at the
     // back.
@@ -25,32 +26,30 @@ pub fn traverse_directory<'a, DS: DirectoryService + 'static>(
     // We omit sending the same directories multiple times.
     let mut sent_directory_digests: HashSet<B3Digest> = HashSet::new();
 
+    let root_directory_digest = root_directory_digest.clone();
+
     Box::pin(try_stream! {
         while let Some(current_directory_digest) = worklist_directory_digests.pop_front() {
-            let current_directory = directory_service.get(&current_directory_digest).await.map_err(|e| {
+            let current_directory = match directory_service.get(&current_directory_digest).await.map_err(|e| {
                 warn!("failed to look up directory");
                 Error::StorageError(format!(
                     "unable to look up directory {}: {}",
                     current_directory_digest, e
                 ))
-            })?.ok_or_else(|| {
-                // if it's not there, we have an inconsistent store!
-                warn!("directory {} does not exist", current_directory_digest);
-                Error::StorageError(format!(
-                    "directory {} does not exist",
-                    current_directory_digest
-                ))
-
-            })?;
-
-            // validate, we don't want to send invalid directories.
-            current_directory.validate().map_err(|e| {
-               warn!("directory failed validation: {}", e.to_string());
-               Error::StorageError(format!(
-                   "invalid directory: {}",
-                   current_directory_digest
-               ))
-            })?;
+            })? {
+                // the root node of the requested closure was not found; return an empty stream
+                None if current_directory_digest == root_directory_digest => break,
+                // if a child directory of the closure is not there, we have an inconsistent store!
+                None => {
+                    warn!("directory {} does not exist", current_directory_digest);
+                    Err(Error::StorageError(format!(
+                        "directory {} does not exist",
+                        current_directory_digest
+                    )))?;
+                    break;
+                }
+                Some(dir) => dir,
+            };
 
             // We're about to send this directory, so let's avoid sending it again if a
             // descendant has it.
@@ -59,16 +58,15 @@ pub fn traverse_directory<'a, DS: DirectoryService + 'static>(
             // enqueue all child directory digests to the work queue, as
             // long as they're not part of the worklist or already sent.
             // Digests here come from already-validated Directory nodes, so they are well-formed.
-            for child_directory_node in &current_directory.directories {
-                // TODO: propagate error
-                let child_digest: B3Digest = child_directory_node.digest.clone().try_into().unwrap();
-
-                if worklist_directory_digests.contains(&child_digest)
-                    || sent_directory_digests.contains(&child_digest)
-                {
-                    continue;
+            for (_, child_directory_node) in current_directory.nodes() {
+                if let Node::Directory{digest: child_digest, ..} = child_directory_node {
+                    if worklist_directory_digests.contains(child_digest)
+                        || sent_directory_digests.contains(child_digest)
+                    {
+                        continue;
+                    }
+                    worklist_directory_digests.push_back(child_digest.clone());
                 }
-                worklist_directory_digests.push_back(child_digest);
             }
 
             yield current_directory;
diff --git a/tvix/castore/src/errors.rs b/tvix/castore/src/errors.rs
index 8343d0774aec..7b5d1a422c99 100644
--- a/tvix/castore/src/errors.rs
+++ b/tvix/castore/src/errors.rs
@@ -1,7 +1,13 @@
+use bstr::ByteSlice;
 use thiserror::Error;
 use tokio::task::JoinError;
 use tonic::Status;
 
+use crate::{
+    path::{PathComponent, PathComponentError},
+    SymlinkTargetError,
+};
+
 /// Errors related to communication with the store.
 #[derive(Debug, Error, PartialEq)]
 pub enum Error {
@@ -12,6 +18,48 @@ pub enum Error {
     StorageError(String),
 }
 
+/// Errors that occur during construction of [crate::Node]
+#[derive(Debug, thiserror::Error, PartialEq)]
+pub enum ValidateNodeError {
+    /// Invalid digest length encountered
+    #[error("invalid digest length: {0}")]
+    InvalidDigestLen(usize),
+    /// Invalid symlink target
+    #[error("Invalid symlink target: {0}")]
+    InvalidSymlinkTarget(SymlinkTargetError),
+}
+
+impl From<crate::digests::Error> for ValidateNodeError {
+    fn from(e: crate::digests::Error) -> Self {
+        match e {
+            crate::digests::Error::InvalidDigestLen(n) => ValidateNodeError::InvalidDigestLen(n),
+        }
+    }
+}
+
+/// Errors that can occur when populating [crate::Directory] messages,
+/// or parsing [crate::proto::Directory]
+#[derive(Debug, thiserror::Error, PartialEq)]
+pub enum DirectoryError {
+    /// Multiple elements with the same name encountered
+    #[error("{:?} is a duplicate name", .0)]
+    DuplicateName(PathComponent),
+    /// Node failed validation
+    #[error("invalid node with name {}: {:?}", .0, .1.to_string())]
+    InvalidNode(PathComponent, ValidateNodeError),
+    #[error("Total size exceeds u64::MAX")]
+    SizeOverflow,
+    /// Invalid name encountered
+    #[error("Invalid name: {0}")]
+    InvalidName(PathComponentError),
+    /// Elements are not in sorted order. Can only happen on protos
+    #[error("{:?} is not sorted", .0.as_bstr())]
+    WrongSorting(bytes::Bytes),
+    /// This can only happen if there's an unknown node type (on protos)
+    #[error("No node set")]
+    NoNodeSet,
+}
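+
+// Illustrative (a sketch, not part of this change): inserting two entries with
+// the same name is expected to surface as DirectoryError::DuplicateName:
+//
+//     let err = Directory::try_from_iter([
+//         ("a".try_into().unwrap(), node.clone()),
+//         ("a".try_into().unwrap(), node),
+//     ]).unwrap_err();
+//     assert!(matches!(err, DirectoryError::DuplicateName(_)));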
+
 impl From<JoinError> for Error {
     fn from(value: JoinError) -> Self {
         Error::StorageError(value.to_string())
@@ -33,6 +81,42 @@ impl From<crate::tonic::Error> for Error {
     }
 }
 
+impl From<redb::Error> for Error {
+    fn from(value: redb::Error) -> Self {
+        Error::StorageError(value.to_string())
+    }
+}
+
+impl From<redb::DatabaseError> for Error {
+    fn from(value: redb::DatabaseError) -> Self {
+        Error::StorageError(value.to_string())
+    }
+}
+
+impl From<redb::TableError> for Error {
+    fn from(value: redb::TableError) -> Self {
+        Error::StorageError(value.to_string())
+    }
+}
+
+impl From<redb::TransactionError> for Error {
+    fn from(value: redb::TransactionError) -> Self {
+        Error::StorageError(value.to_string())
+    }
+}
+
+impl From<redb::StorageError> for Error {
+    fn from(value: redb::StorageError) -> Self {
+        Error::StorageError(value.to_string())
+    }
+}
+
+impl From<redb::CommitError> for Error {
+    fn from(value: redb::CommitError) -> Self {
+        Error::StorageError(value.to_string())
+    }
+}
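+
+// These conversions let redb operations inside the services use `?` directly,
+// e.g. (illustrative, not part of this change):
+//
+//     let txn = db.begin_write()?;                  // redb::TransactionError -> Error
+//     let table = txn.open_table(DIRECTORY_TABLE)?; // redb::TableError -> Error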
+
 impl From<std::io::Error> for Error {
     fn from(value: std::io::Error) -> Self {
         if value.kind() == std::io::ErrorKind::InvalidInput {
diff --git a/tvix/castore/src/fixtures.rs b/tvix/castore/src/fixtures.rs
index a206d9b7ddc6..05bad916d55f 100644
--- a/tvix/castore/src/fixtures.rs
+++ b/tvix/castore/src/fixtures.rs
@@ -1,7 +1,4 @@
-use crate::{
-    proto::{self, Directory, DirectoryNode, FileNode, SymlinkNode},
-    B3Digest,
-};
+use crate::{B3Digest, Directory, Node};
 use lazy_static::lazy_static;
 
 pub const HELLOWORLD_BLOB_CONTENTS: &[u8] = b"Hello World!";
@@ -34,55 +31,74 @@ lazy_static! {
     pub static ref BLOB_B_DIGEST: B3Digest = blake3::hash(&BLOB_B).as_bytes().into();
 
     // Directories
-    pub static ref DIRECTORY_WITH_KEEP: proto::Directory = proto::Directory {
-        directories: vec![],
-        files: vec![FileNode {
-            name: b".keep".to_vec().into(),
-            digest: EMPTY_BLOB_DIGEST.clone().into(),
+    pub static ref DIRECTORY_WITH_KEEP: Directory = Directory::try_from_iter([(
+        ".keep".try_into().unwrap(),
+        Node::File{
+            digest: EMPTY_BLOB_DIGEST.clone(),
             size: 0,
-            executable: false,
-        }],
-        symlinks: vec![],
-    };
-    pub static ref DIRECTORY_COMPLICATED: proto::Directory = proto::Directory {
-        directories: vec![DirectoryNode {
-            name: b"keep".to_vec().into(),
-            digest: DIRECTORY_WITH_KEEP.digest().into(),
-            size: DIRECTORY_WITH_KEEP.size(),
-        }],
-        files: vec![FileNode {
-            name: b".keep".to_vec().into(),
-            digest: EMPTY_BLOB_DIGEST.clone().into(),
-            size: 0,
-            executable: false,
-        }],
-        symlinks: vec![SymlinkNode {
-            name: b"aa".to_vec().into(),
-            target: b"/nix/store/somewhereelse".to_vec().into(),
-        }],
-    };
-    pub static ref DIRECTORY_A: Directory = Directory::default();
-    pub static ref DIRECTORY_B: Directory = Directory {
-        directories: vec![DirectoryNode {
-            name: b"a".to_vec().into(),
-            digest: DIRECTORY_A.digest().into(),
-            size: DIRECTORY_A.size(),
-        }],
-        ..Default::default()
-    };
-    pub static ref DIRECTORY_C: Directory = Directory {
-        directories: vec![
-            DirectoryNode {
-                name: b"a".to_vec().into(),
-                digest: DIRECTORY_A.digest().into(),
+            executable: false
+    })]).unwrap();
+    pub static ref DIRECTORY_COMPLICATED: Directory = Directory::try_from_iter([
+        (
+            "keep".try_into().unwrap(),
+            Node::Directory{
+                digest: DIRECTORY_WITH_KEEP.digest(),
+                size: DIRECTORY_WITH_KEEP.size()
+            }
+        ),
+        (
+            ".keep".try_into().unwrap(),
+            Node::File{
+                digest: EMPTY_BLOB_DIGEST.clone(),
+                size: 0,
+                executable: false
+            }
+        ),
+        (
+            "aa".try_into().unwrap(),
+            Node::Symlink{
+                target: "/nix/store/somewhereelse".try_into().unwrap()
+            }
+        )
+    ]).unwrap();
+    pub static ref DIRECTORY_A: Directory = Directory::new();
+    pub static ref DIRECTORY_B: Directory = Directory::try_from_iter([(
+            "a".try_into().unwrap(),
+            Node::Directory{
+                digest: DIRECTORY_A.digest(),
                 size: DIRECTORY_A.size(),
-            },
-            DirectoryNode {
-                name: b"a'".to_vec().into(),
-                digest: DIRECTORY_A.digest().into(),
+            }
+    )]).unwrap();
+    pub static ref DIRECTORY_C: Directory = Directory::try_from_iter([
+        (
+            "a".try_into().unwrap(),
+            Node::Directory{
+                digest: DIRECTORY_A.digest(),
                 size: DIRECTORY_A.size(),
             }
-        ],
-        ..Default::default()
-    };
+        ),
+        (
+            "a'".try_into().unwrap(),
+            Node::Directory{
+                digest: DIRECTORY_A.digest(),
+                size: DIRECTORY_A.size(),
+            }
+        )
+    ]).unwrap();
+    pub static ref DIRECTORY_D: Directory = Directory::try_from_iter([
+        (
+            "a".try_into().unwrap(),
+            Node::Directory{
+                digest: DIRECTORY_A.digest(),
+                size: DIRECTORY_A.size(),
+            }
+        ),
+        (
+            "b".try_into().unwrap(),
+            Node::Directory{
+                digest: DIRECTORY_B.digest(),
+                size: DIRECTORY_B.size(),
+            }
+        )
+    ]).unwrap();
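+    // Illustrative closure shape (comment only, not part of this change):
+    //
+    //   DIRECTORY_D ──a──> DIRECTORY_A (empty)
+    //              └──b──> DIRECTORY_B ──a──> DIRECTORY_A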
 }
diff --git a/tvix/castore/src/fs/fuse.rs b/tvix/castore/src/fs/fuse/mod.rs
index cd50618ff5bc..64ef29ed2aa1 100644
--- a/tvix/castore/src/fs/fuse.rs
+++ b/tvix/castore/src/fs/fuse/mod.rs
@@ -1,8 +1,13 @@
-use std::{io, path::Path, sync::Arc, thread};
+use std::{io, path::Path, sync::Arc};
 
 use fuse_backend_rs::{api::filesystem::FileSystem, transport::FuseSession};
+use parking_lot::Mutex;
+use threadpool::ThreadPool;
 use tracing::{error, instrument};
 
+#[cfg(test)]
+mod tests;
+
 struct FuseServer<FS>
 where
     FS: FileSystem + Sync + Send,
@@ -46,9 +51,12 @@ where
     }
 }
 
+/// Starts a [FileSystem] with the specified number of threads, and provides
+/// functions to unmount it and to wait for it to have completed.
+#[derive(Clone)]
 pub struct FuseDaemon {
-    session: FuseSession,
-    threads: Vec<thread::JoinHandle<()>>,
+    session: Arc<Mutex<FuseSession>>,
+    threads: Arc<ThreadPool>,
 }
 
 impl FuseDaemon {
@@ -56,7 +64,7 @@ impl FuseDaemon {
     pub fn new<FS, P>(
         fs: FS,
         mountpoint: P,
-        threads: usize,
+        num_threads: usize,
         allow_other: bool,
     ) -> Result<Self, io::Error>
     where
@@ -73,40 +81,49 @@ impl FuseDaemon {
         session
             .mount()
             .map_err(|e| io::Error::new(io::ErrorKind::Other, e.to_string()))?;
-        let mut join_handles = Vec::with_capacity(threads);
-        for _ in 0..threads {
+
+        // construct a thread pool
+        let threads = threadpool::Builder::new()
+            .num_threads(num_threads)
+            .thread_name("fuse_server".to_string())
+            .build();
+
+        for _ in 0..num_threads {
+            // for each thread requested, create and start a FuseServer accepting requests.
             let mut server = FuseServer {
                 server: server.clone(),
                 channel: session
                     .new_channel()
                     .map_err(|e| io::Error::new(io::ErrorKind::Other, e.to_string()))?,
             };
-            let join_handle = thread::Builder::new()
-                .name("fuse_server".to_string())
-                .spawn(move || {
-                    let _ = server.start();
-                })?;
-            join_handles.push(join_handle);
+
+            threads.execute(move || {
+                let _ = server.start();
+            });
         }
 
         Ok(FuseDaemon {
-            session,
-            threads: join_handles,
+            session: Arc::new(Mutex::new(session)),
+            threads: Arc::new(threads),
         })
     }
 
+    /// Waits for all threads to finish.
+    #[instrument(skip_all)]
+    pub fn wait(&self) {
+        self.threads.join()
+    }
+
+    /// Sends the unmount command and waits for all threads to finish.
     #[instrument(skip_all, err)]
-    pub fn unmount(&mut self) -> Result<(), io::Error> {
+    pub fn unmount(&self) -> Result<(), io::Error> {
+        // Send the unmount command.
         self.session
+            .lock()
             .umount()
             .map_err(|e| io::Error::new(io::ErrorKind::Other, e.to_string()))?;
 
-        for thread in self.threads.drain(..) {
-            thread.join().map_err(|_| {
-                io::Error::new(io::ErrorKind::Other, "failed to join fuse server thread")
-            })?;
-        }
-
+        self.wait();
         Ok(())
     }
 }
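+
+// Illustrative lifecycle (a sketch, not part of this change), assuming a
+// filesystem `fs` and a mountpoint path `mp`:
+//
+//     let daemon = FuseDaemon::new(fs, &mp, 4, false)?;
+//     // requests are now served by 4 worker threads; clones share the session
+//     daemon.unmount()?; // sends umount and joins all workers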
diff --git a/tvix/castore/src/fs/tests.rs b/tvix/castore/src/fs/fuse/tests.rs
index d6eeb8a4113d..9e01204d5da7 100644
--- a/tvix/castore/src/fs/tests.rs
+++ b/tvix/castore/src/fs/fuse/tests.rs
@@ -1,5 +1,4 @@
 use bstr::ByteSlice;
-use bytes::Bytes;
 use std::{
     collections::BTreeMap,
     ffi::{OsStr, OsString},
@@ -11,13 +10,15 @@ use std::{
 use tempfile::TempDir;
 use tokio_stream::{wrappers::ReadDirStream, StreamExt};
 
-use super::{fuse::FuseDaemon, TvixStoreFs};
-use crate::proto as castorepb;
-use crate::proto::node::Node;
+use super::FuseDaemon;
 use crate::{
     blobservice::{BlobService, MemoryBlobService},
     directoryservice::{DirectoryService, MemoryDirectoryService},
-    fixtures,
+    fixtures, Node,
+};
+use crate::{
+    fs::{TvixStoreFs, XATTR_NAME_BLOB_DIGEST, XATTR_NAME_DIRECTORY_DIGEST},
+    PathComponent,
 };
 
 const BLOB_A_NAME: &str = "00000000000000000000000000000000-test";
@@ -38,7 +39,7 @@ fn gen_svcs() -> (Arc<dyn BlobService>, Arc<dyn DirectoryService>) {
 fn do_mount<P: AsRef<Path>, BS, DS>(
     blob_service: BS,
     directory_service: DS,
-    root_nodes: BTreeMap<bytes::Bytes, Node>,
+    root_nodes: BTreeMap<PathComponent, Node>,
     mountpoint: P,
     list_root: bool,
     show_xattr: bool,
@@ -59,7 +60,7 @@ where
 
 async fn populate_blob_a(
     blob_service: &Arc<dyn BlobService>,
-    root_nodes: &mut BTreeMap<Bytes, Node>,
+    root_nodes: &mut BTreeMap<PathComponent, Node>,
 ) {
     let mut bw = blob_service.open_write().await;
     tokio::io::copy(&mut Cursor::new(fixtures::BLOB_A.to_vec()), &mut bw)
@@ -68,19 +69,18 @@ async fn populate_blob_a(
     bw.close().await.expect("must succeed closing");
 
     root_nodes.insert(
-        BLOB_A_NAME.into(),
-        Node::File(castorepb::FileNode {
-            name: BLOB_A_NAME.into(),
-            digest: fixtures::BLOB_A_DIGEST.clone().into(),
+        BLOB_A_NAME.try_into().unwrap(),
+        Node::File {
+            digest: fixtures::BLOB_A_DIGEST.clone(),
             size: fixtures::BLOB_A.len() as u64,
             executable: false,
-        }),
+        },
     );
 }
 
 async fn populate_blob_b(
     blob_service: &Arc<dyn BlobService>,
-    root_nodes: &mut BTreeMap<Bytes, Node>,
+    root_nodes: &mut BTreeMap<PathComponent, Node>,
 ) {
     let mut bw = blob_service.open_write().await;
     tokio::io::copy(&mut Cursor::new(fixtures::BLOB_B.to_vec()), &mut bw)
@@ -89,20 +89,19 @@ async fn populate_blob_b(
     bw.close().await.expect("must succeed closing");
 
     root_nodes.insert(
-        BLOB_B_NAME.into(),
-        Node::File(castorepb::FileNode {
-            name: BLOB_B_NAME.into(),
-            digest: fixtures::BLOB_B_DIGEST.clone().into(),
+        BLOB_B_NAME.try_into().unwrap(),
+        Node::File {
+            digest: fixtures::BLOB_B_DIGEST.clone(),
             size: fixtures::BLOB_B.len() as u64,
             executable: false,
-        }),
+        },
     );
 }
 
 /// adds a blob containing helloworld and marks it as executable
 async fn populate_blob_helloworld(
     blob_service: &Arc<dyn BlobService>,
-    root_nodes: &mut BTreeMap<Bytes, Node>,
+    root_nodes: &mut BTreeMap<PathComponent, Node>,
 ) {
     let mut bw = blob_service.open_write().await;
     tokio::io::copy(
@@ -114,42 +113,39 @@ async fn populate_blob_helloworld(
     bw.close().await.expect("must succeed closing");
 
     root_nodes.insert(
-        HELLOWORLD_BLOB_NAME.into(),
-        Node::File(castorepb::FileNode {
-            name: HELLOWORLD_BLOB_NAME.into(),
-            digest: fixtures::HELLOWORLD_BLOB_DIGEST.clone().into(),
+        HELLOWORLD_BLOB_NAME.try_into().unwrap(),
+        Node::File {
+            digest: fixtures::HELLOWORLD_BLOB_DIGEST.clone(),
             size: fixtures::HELLOWORLD_BLOB_CONTENTS.len() as u64,
             executable: true,
-        }),
+        },
     );
 }
 
-async fn populate_symlink(root_nodes: &mut BTreeMap<Bytes, Node>) {
+async fn populate_symlink(root_nodes: &mut BTreeMap<PathComponent, Node>) {
     root_nodes.insert(
-        SYMLINK_NAME.into(),
-        Node::Symlink(castorepb::SymlinkNode {
-            name: SYMLINK_NAME.into(),
-            target: BLOB_A_NAME.into(),
-        }),
+        SYMLINK_NAME.try_into().unwrap(),
+        Node::Symlink {
+            target: BLOB_A_NAME.try_into().unwrap(),
+        },
     );
 }
 
 /// This writes a symlink pointing to /nix/store/somewhereelse,
 /// which is the same symlink target as "aa" inside DIRECTORY_COMPLICATED.
-async fn populate_symlink2(root_nodes: &mut BTreeMap<Bytes, Node>) {
+async fn populate_symlink2(root_nodes: &mut BTreeMap<PathComponent, Node>) {
     root_nodes.insert(
-        SYMLINK_NAME2.into(),
-        Node::Symlink(castorepb::SymlinkNode {
-            name: SYMLINK_NAME2.into(),
-            target: "/nix/store/somewhereelse".into(),
-        }),
+        SYMLINK_NAME2.try_into().unwrap(),
+        Node::Symlink {
+            target: "/nix/store/somewhereelse".try_into().unwrap(),
+        },
     );
 }
 
 async fn populate_directory_with_keep(
     blob_service: &Arc<dyn BlobService>,
     directory_service: &Arc<dyn DirectoryService>,
-    root_nodes: &mut BTreeMap<Bytes, Node>,
+    root_nodes: &mut BTreeMap<PathComponent, Node>,
 ) {
     // upload empty blob
     let mut bw = blob_service.open_write().await;
@@ -165,45 +161,42 @@ async fn populate_directory_with_keep(
         .expect("must succeed uploading");
 
     root_nodes.insert(
-        DIRECTORY_WITH_KEEP_NAME.into(),
-        castorepb::node::Node::Directory(castorepb::DirectoryNode {
-            name: DIRECTORY_WITH_KEEP_NAME.into(),
-            digest: fixtures::DIRECTORY_WITH_KEEP.digest().into(),
+        DIRECTORY_WITH_KEEP_NAME.try_into().unwrap(),
+        Node::Directory {
+            digest: fixtures::DIRECTORY_WITH_KEEP.digest(),
             size: fixtures::DIRECTORY_WITH_KEEP.size(),
-        }),
+        },
     );
 }
 
 /// Create a root node for DIRECTORY_WITH_KEEP, but don't upload the Directory
 /// itself.
-async fn populate_directorynode_without_directory(root_nodes: &mut BTreeMap<Bytes, Node>) {
+async fn populate_directorynode_without_directory(root_nodes: &mut BTreeMap<PathComponent, Node>) {
     root_nodes.insert(
-        DIRECTORY_WITH_KEEP_NAME.into(),
-        castorepb::node::Node::Directory(castorepb::DirectoryNode {
-            name: DIRECTORY_WITH_KEEP_NAME.into(),
-            digest: fixtures::DIRECTORY_WITH_KEEP.digest().into(),
+        DIRECTORY_WITH_KEEP_NAME.try_into().unwrap(),
+        Node::Directory {
+            digest: fixtures::DIRECTORY_WITH_KEEP.digest(),
             size: fixtures::DIRECTORY_WITH_KEEP.size(),
-        }),
+        },
     );
 }
 
 /// Insert BLOB_A, but don't provide the blob .keep is pointing to.
-async fn populate_filenode_without_blob(root_nodes: &mut BTreeMap<Bytes, Node>) {
+async fn populate_filenode_without_blob(root_nodes: &mut BTreeMap<PathComponent, Node>) {
     root_nodes.insert(
-        BLOB_A_NAME.into(),
-        Node::File(castorepb::FileNode {
-            name: BLOB_A_NAME.into(),
-            digest: fixtures::BLOB_A_DIGEST.clone().into(),
+        BLOB_A_NAME.try_into().unwrap(),
+        Node::File {
+            digest: fixtures::BLOB_A_DIGEST.clone(),
             size: fixtures::BLOB_A.len() as u64,
             executable: false,
-        }),
+        },
     );
 }
 
 async fn populate_directory_complicated(
     blob_service: &Arc<dyn BlobService>,
     directory_service: &Arc<dyn DirectoryService>,
-    root_nodes: &mut BTreeMap<Bytes, Node>,
+    root_nodes: &mut BTreeMap<PathComponent, Node>,
 ) {
     // upload empty blob
     let mut bw = blob_service.open_write().await;
@@ -225,12 +218,11 @@ async fn populate_directory_complicated(
         .expect("must succeed uploading");
 
     root_nodes.insert(
-        DIRECTORY_COMPLICATED_NAME.into(),
-        Node::Directory(castorepb::DirectoryNode {
-            name: DIRECTORY_COMPLICATED_NAME.into(),
-            digest: fixtures::DIRECTORY_COMPLICATED.digest().into(),
+        DIRECTORY_COMPLICATED_NAME.try_into().unwrap(),
+        Node::Directory {
+            digest: fixtures::DIRECTORY_COMPLICATED.digest(),
             size: fixtures::DIRECTORY_COMPLICATED.size(),
-        }),
+        },
     );
 }
 
@@ -247,7 +239,7 @@ async fn mount() {
 
     let (blob_service, directory_service) = gen_svcs();
 
-    let mut fuse_daemon = do_mount(
+    let fuse_daemon = do_mount(
         blob_service,
         directory_service,
         BTreeMap::default(),
@@ -270,7 +262,7 @@ async fn root() {
     let tmpdir = TempDir::new().unwrap();
 
     let (blob_service, directory_service) = gen_svcs();
-    let mut fuse_daemon = do_mount(
+    let fuse_daemon = do_mount(
         blob_service,
         directory_service,
         BTreeMap::default(),
@@ -304,7 +296,7 @@ async fn root_with_listing() {
 
     populate_blob_a(&blob_service, &mut root_nodes).await;
 
-    let mut fuse_daemon = do_mount(
+    let fuse_daemon = do_mount(
         blob_service,
         directory_service,
         root_nodes,
@@ -348,7 +340,7 @@ async fn stat_file_at_root() {
 
     populate_blob_a(&blob_service, &mut root_nodes).await;
 
-    let mut fuse_daemon = do_mount(
+    let fuse_daemon = do_mount(
         blob_service,
         directory_service,
         root_nodes,
@@ -385,7 +377,7 @@ async fn read_file_at_root() {
 
     populate_blob_a(&blob_service, &mut root_nodes).await;
 
-    let mut fuse_daemon = do_mount(
+    let fuse_daemon = do_mount(
         blob_service,
         directory_service,
         root_nodes,
@@ -422,7 +414,7 @@ async fn read_large_file_at_root() {
 
     populate_blob_b(&blob_service, &mut root_nodes).await;
 
-    let mut fuse_daemon = do_mount(
+    let fuse_daemon = do_mount(
         blob_service,
         directory_service,
         root_nodes,
@@ -467,7 +459,7 @@ async fn symlink_readlink() {
 
     populate_symlink(&mut root_nodes).await;
 
-    let mut fuse_daemon = do_mount(
+    let fuse_daemon = do_mount(
         blob_service,
         directory_service,
         root_nodes,
@@ -514,7 +506,7 @@ async fn read_stat_through_symlink() {
     populate_blob_a(&blob_service, &mut root_nodes).await;
     populate_symlink(&mut root_nodes).await;
 
-    let mut fuse_daemon = do_mount(
+    let fuse_daemon = do_mount(
         blob_service,
         directory_service,
         root_nodes,
@@ -559,7 +551,7 @@ async fn read_stat_directory() {
 
     populate_directory_with_keep(&blob_service, &directory_service, &mut root_nodes).await;
 
-    let mut fuse_daemon = do_mount(
+    let fuse_daemon = do_mount(
         blob_service,
         directory_service,
         root_nodes,
@@ -596,7 +588,7 @@ async fn xattr() {
     populate_directory_with_keep(&blob_service, &directory_service, &mut root_nodes).await;
     populate_blob_a(&blob_service, &mut root_nodes).await;
 
-    let mut fuse_daemon = do_mount(
+    let fuse_daemon = do_mount(
         blob_service,
         directory_service,
         root_nodes,
@@ -614,12 +606,12 @@ async fn xattr() {
         // There should be 1 key, XATTR_NAME_DIRECTORY_DIGEST.
         assert_eq!(1, xattr_names.len(), "there should be 1 xattr name");
         assert_eq!(
-            super::XATTR_NAME_DIRECTORY_DIGEST,
+            XATTR_NAME_DIRECTORY_DIGEST,
             xattr_names.first().unwrap().as_encoded_bytes()
         );
 
         // The key should equal the string-formatted b3 digest.
-        let val = xattr::get(&p, OsStr::from_bytes(super::XATTR_NAME_DIRECTORY_DIGEST))
+        let val = xattr::get(&p, OsStr::from_bytes(XATTR_NAME_DIRECTORY_DIGEST))
             .expect("must succeed")
             .expect("must be some");
         assert_eq!(
@@ -643,12 +635,12 @@ async fn xattr() {
         // There should be 1 key, XATTR_NAME_BLOB_DIGEST.
         assert_eq!(1, xattr_names.len(), "there should be 1 xattr name");
         assert_eq!(
-            super::XATTR_NAME_BLOB_DIGEST,
+            XATTR_NAME_BLOB_DIGEST,
             xattr_names.first().unwrap().as_encoded_bytes()
         );
 
         // The key should equal the string-formatted b3 digest.
-        let val = xattr::get(&p, OsStr::from_bytes(super::XATTR_NAME_BLOB_DIGEST))
+        let val = xattr::get(&p, OsStr::from_bytes(XATTR_NAME_BLOB_DIGEST))
             .expect("must succeed")
             .expect("must be some");
         assert_eq!(
@@ -679,7 +671,7 @@ async fn read_blob_inside_dir() {
 
     populate_directory_with_keep(&blob_service, &directory_service, &mut root_nodes).await;
 
-    let mut fuse_daemon = do_mount(
+    let fuse_daemon = do_mount(
         blob_service,
         directory_service,
         root_nodes,
@@ -719,7 +711,7 @@ async fn read_blob_deep_inside_dir() {
 
     populate_directory_complicated(&blob_service, &directory_service, &mut root_nodes).await;
 
-    let mut fuse_daemon = do_mount(
+    let fuse_daemon = do_mount(
         blob_service,
         directory_service,
         root_nodes,
@@ -762,7 +754,7 @@ async fn readdir() {
 
     populate_directory_complicated(&blob_service, &directory_service, &mut root_nodes).await;
 
-    let mut fuse_daemon = do_mount(
+    let fuse_daemon = do_mount(
         blob_service,
         directory_service,
         root_nodes,
@@ -822,7 +814,7 @@ async fn readdir_deep() {
 
     populate_directory_complicated(&blob_service, &directory_service, &mut root_nodes).await;
 
-    let mut fuse_daemon = do_mount(
+    let fuse_daemon = do_mount(
         blob_service,
         directory_service,
         root_nodes,
@@ -872,7 +864,7 @@ async fn check_attributes() {
     populate_symlink(&mut root_nodes).await;
     populate_blob_helloworld(&blob_service, &mut root_nodes).await;
 
-    let mut fuse_daemon = do_mount(
+    let fuse_daemon = do_mount(
         blob_service,
         directory_service,
         root_nodes,
@@ -947,7 +939,7 @@ async fn compare_inodes_directories() {
     populate_directory_with_keep(&blob_service, &directory_service, &mut root_nodes).await;
     populate_directory_complicated(&blob_service, &directory_service, &mut root_nodes).await;
 
-    let mut fuse_daemon = do_mount(
+    let fuse_daemon = do_mount(
         blob_service,
         directory_service,
         root_nodes,
@@ -991,7 +983,7 @@ async fn compare_inodes_files() {
 
     populate_directory_complicated(&blob_service, &directory_service, &mut root_nodes).await;
 
-    let mut fuse_daemon = do_mount(
+    let fuse_daemon = do_mount(
         blob_service,
         directory_service,
         root_nodes,
@@ -1040,7 +1032,7 @@ async fn compare_inodes_symlinks() {
     populate_directory_complicated(&blob_service, &directory_service, &mut root_nodes).await;
     populate_symlink2(&mut root_nodes).await;
 
-    let mut fuse_daemon = do_mount(
+    let fuse_daemon = do_mount(
         blob_service,
         directory_service,
         root_nodes,
@@ -1083,7 +1075,7 @@ async fn read_wrong_paths_in_root() {
 
     populate_blob_a(&blob_service, &mut root_nodes).await;
 
-    let mut fuse_daemon = do_mount(
+    let fuse_daemon = do_mount(
         blob_service,
         directory_service,
         root_nodes,
@@ -1138,7 +1130,7 @@ async fn disallow_writes() {
     let (blob_service, directory_service) = gen_svcs();
     let root_nodes = BTreeMap::default();
 
-    let mut fuse_daemon = do_mount(
+    let fuse_daemon = do_mount(
         blob_service,
         directory_service,
         root_nodes,
@@ -1170,7 +1162,7 @@ async fn missing_directory() {
 
     populate_directorynode_without_directory(&mut root_nodes).await;
 
-    let mut fuse_daemon = do_mount(
+    let fuse_daemon = do_mount(
         blob_service,
         directory_service,
         root_nodes,
@@ -1218,7 +1210,7 @@ async fn missing_blob() {
 
     populate_filenode_without_blob(&mut root_nodes).await;
 
-    let mut fuse_daemon = do_mount(
+    let fuse_daemon = do_mount(
         blob_service,
         directory_service,
         root_nodes,
diff --git a/tvix/castore/src/fs/inodes.rs b/tvix/castore/src/fs/inodes.rs
index bdd459543470..2696fdede378 100644
--- a/tvix/castore/src/fs/inodes.rs
+++ b/tvix/castore/src/fs/inodes.rs
@@ -2,10 +2,7 @@
 //! about inodes, which present tvix-castore nodes in a filesystem.
 use std::time::Duration;
 
-use bytes::Bytes;
-
-use crate::proto as castorepb;
-use crate::B3Digest;
+use crate::{path::PathComponent, B3Digest, Node};
 
 #[derive(Clone, Debug)]
 pub enum InodeData {
@@ -20,27 +17,23 @@ pub enum InodeData {
 /// lookup and did fetch the data.
 #[derive(Clone, Debug)]
 pub enum DirectoryInodeData {
-    Sparse(B3Digest, u64),                                  // digest, size
-    Populated(B3Digest, Vec<(u64, castorepb::node::Node)>), // [(child_inode, node)]
+    Sparse(B3Digest, u64),                                // digest, size
+    Populated(B3Digest, Vec<(u64, PathComponent, Node)>), // [(child_inode, name, node)]
 }
 
 impl InodeData {
     /// Constructs a new InodeData by consuming a [Node].
-    /// It splits off the orginal name, so it can be used later.
-    pub fn from_node(node: castorepb::node::Node) -> (Self, Bytes) {
+    pub fn from_node(node: &Node) -> Self {
         match node {
-            castorepb::node::Node::Directory(n) => (
-                Self::Directory(DirectoryInodeData::Sparse(
-                    n.digest.try_into().unwrap(),
-                    n.size,
-                )),
-                n.name,
-            ),
-            castorepb::node::Node::File(n) => (
-                Self::Regular(n.digest.try_into().unwrap(), n.size, n.executable),
-                n.name,
-            ),
-            castorepb::node::Node::Symlink(n) => (Self::Symlink(n.target), n.name),
+            Node::Directory { digest, size } => {
+                Self::Directory(DirectoryInodeData::Sparse(digest.clone(), *size))
+            }
+            Node::File {
+                digest,
+                size,
+                executable,
+            } => Self::Regular(digest.clone(), *size, *executable),
+            Node::Symlink { target } => Self::Symlink(target.clone().into()),
         }
     }
 
diff --git a/tvix/castore/src/fs/mod.rs b/tvix/castore/src/fs/mod.rs
index 826523131fbd..e700a25d3966 100644
--- a/tvix/castore/src/fs/mod.rs
+++ b/tvix/castore/src/fs/mod.rs
@@ -9,24 +9,19 @@ pub mod fuse;
 #[cfg(feature = "virtiofs")]
 pub mod virtiofs;
 
-#[cfg(test)]
-mod tests;
-
 pub use self::root_nodes::RootNodes;
 use self::{
     file_attr::ROOT_FILE_ATTR,
     inode_tracker::InodeTracker,
     inodes::{DirectoryInodeData, InodeData},
 };
-use crate::proto as castorepb;
 use crate::{
     blobservice::{BlobReader, BlobService},
     directoryservice::DirectoryService,
-    proto::{node::Node, NamedNode},
-    B3Digest,
+    path::PathComponent,
+    B3Digest, Node,
 };
 use bstr::ByteVec;
-use bytes::Bytes;
 use fuse_backend_rs::abi::fuse_abi::{stat64, OpenOptions};
 use fuse_backend_rs::api::filesystem::{
     Context, FileSystem, FsOptions, GetxattrReply, ListxattrReply, ROOT_ID,
@@ -46,7 +41,7 @@ use tokio::{
     io::{AsyncReadExt, AsyncSeekExt},
     sync::mpsc,
 };
-use tracing::{debug, error, instrument, warn, Span};
+use tracing::{debug, error, instrument, warn, Instrument as _, Span};
 
 /// This implements a read-only FUSE filesystem for a tvix-store
 /// with the passed [BlobService], [DirectoryService] and [RootNodes].
@@ -92,7 +87,7 @@ pub struct TvixStoreFs<BS, DS, RN> {
     show_xattr: bool,
 
     /// This maps a given basename in the root to the inode we allocated for the node.
-    root_nodes: RwLock<HashMap<Bytes, u64>>,
+    root_nodes: RwLock<HashMap<PathComponent, u64>>,
 
     /// This keeps track of inodes and data alongside them.
     inode_tracker: RwLock<InodeTracker>,
@@ -108,7 +103,7 @@ pub struct TvixStoreFs<BS, DS, RN> {
             u64,
             (
                 Span,
-                Arc<Mutex<mpsc::Receiver<(usize, Result<Node, crate::Error>)>>>,
+                Arc<Mutex<mpsc::Receiver<(usize, Result<(PathComponent, Node), crate::Error>)>>>,
             ),
         >,
     >,
@@ -159,7 +154,7 @@ where
 
     /// Retrieves the inode for a given root node basename, if present.
     /// This obtains a read lock on self.root_nodes.
-    fn get_inode_for_root_name(&self, name: &[u8]) -> Option<u64> {
+    fn get_inode_for_root_name(&self, name: &PathComponent) -> Option<u64> {
         self.root_nodes.read().get(name).cloned()
     }
 
@@ -170,8 +165,12 @@ where
     /// It is ok if it's a [DirectoryInodeData::Sparse] - in that case, a lookup
     /// in self.directory_service is performed, and self.inode_tracker is updated with the
     /// [DirectoryInodeData::Populated].
+    #[allow(clippy::type_complexity)]
     #[instrument(skip(self), err)]
-    fn get_directory_children(&self, ino: u64) -> io::Result<(B3Digest, Vec<(u64, Node)>)> {
+    fn get_directory_children(
+        &self,
+        ino: u64,
+    ) -> io::Result<(B3Digest, Vec<(u64, PathComponent, Node)>)> {
         let data = self.inode_tracker.read().get(ino).unwrap();
         match *data {
             // if it's populated already, return children.
@@ -201,13 +200,13 @@ where
                 let children = {
                     let mut inode_tracker = self.inode_tracker.write();
 
-                    let children: Vec<(u64, castorepb::node::Node)> = directory
-                        .nodes()
-                        .map(|child_node| {
-                            let (inode_data, _) = InodeData::from_node(child_node.clone());
+                    let children: Vec<(u64, PathComponent, Node)> = directory
+                        .into_nodes()
+                        .map(|(child_name, child_node)| {
+                            let inode_data = InodeData::from_node(&child_node);
 
                             let child_ino = inode_tracker.put(inode_data);
-                            (child_ino, child_node)
+                            (child_ino, child_name, child_node)
                         })
                         .collect();
 
@@ -241,12 +240,12 @@ where
     /// In the case the name can't be found, a libc::ENOENT is returned.
     fn name_in_root_to_ino_and_data(
         &self,
-        name: &std::ffi::CStr,
+        name: &PathComponent,
     ) -> io::Result<(u64, Arc<InodeData>)> {
         // Look up the inode for that root node.
         // If there's one, [self.inode_tracker] MUST also contain the data,
         // which we can then return.
-        if let Some(inode) = self.get_inode_for_root_name(name.to_bytes()) {
+        if let Some(inode) = self.get_inode_for_root_name(name) {
             return Ok((
                 inode,
                 self.inode_tracker
@@ -260,7 +259,8 @@ where
         // We don't have it yet, look it up in [self.root_nodes].
         match self.tokio_handle.block_on({
             let root_nodes_provider = self.root_nodes_provider.clone();
-            async move { root_nodes_provider.get_by_basename(name.to_bytes()).await }
+            let name = name.clone();
+            async move { root_nodes_provider.get_by_basename(&name).await }
         }) {
             // if there was an error looking up the root node, propagate up an IO error.
             Err(_e) => Err(io::Error::from_raw_os_error(libc::EIO)),
@@ -268,15 +268,9 @@ where
             Ok(None) => Err(io::Error::from_raw_os_error(libc::ENOENT)),
             // The root node does exist
             Ok(Some(root_node)) => {
-                // The name must match what's passed in the lookup, otherwise this is also a ENOENT.
-                if root_node.get_name() != name.to_bytes() {
-                    debug!(root_node.name=?root_node.get_name(), found_node.name=%name.to_string_lossy(), "node name mismatch");
-                    return Err(io::Error::from_raw_os_error(libc::ENOENT));
-                }
-
                 // Let's check if someone else beat us to updating the inode tracker and
                 // root_nodes map. This avoids locking inode_tracker for writing.
-                if let Some(ino) = self.root_nodes.read().get(name.to_bytes()) {
+                if let Some(ino) = self.root_nodes.read().get(name) {
                     return Ok((
                         *ino,
                         self.inode_tracker.read().get(*ino).expect("must exist"),
@@ -290,9 +284,9 @@ where
 
                 // insert the (sparse) inode data and register in
                 // self.root_nodes.
-                let (inode_data, name) = InodeData::from_node(root_node);
+                let inode_data = InodeData::from_node(&root_node);
                 let ino = inode_tracker.put(inode_data.clone());
-                root_nodes.insert(name, ino);
+                root_nodes.insert(name.to_owned(), ino);
 
                 Ok((ino, Arc::new(inode_data)))
             }
@@ -348,13 +342,17 @@ where
     ) -> io::Result<fuse_backend_rs::api::filesystem::Entry> {
         debug!("lookup");
 
+        // convert the CStr to a PathComponent
+        // If it can't be converted, we definitely don't have anything here.
+        let name: PathComponent = name.try_into().map_err(|_| std::io::ErrorKind::NotFound)?;
+
         // This goes from a parent inode to a node.
         // - If the parent is [ROOT_ID], we need to check
         //   [self.root_nodes] (fetching from a [RootNode] provider if needed)
         // - Otherwise, lookup the parent in [self.inode_tracker] (which must be
         //   a [InodeData::Directory]), and find the child with that name.
         if parent == ROOT_ID {
-            let (ino, inode_data) = self.name_in_root_to_ino_and_data(name)?;
+            let (ino, inode_data) = self.name_in_root_to_ino_and_data(&name)?;
 
             debug!(inode_data=?&inode_data, ino=ino, "Some");
             return Ok(inode_data.as_fuse_entry(ino));
@@ -367,7 +365,7 @@ where
         // Search for that name in the list of children and return the FileAttrs.
 
         // in the children, find the one with the desired name.
-        if let Some((child_ino, _)) = children.iter().find(|e| e.1.get_name() == name.to_bytes()) {
+        if let Some((child_ino, _, _)) = children.iter().find(|(_, n, _)| n == &name) {
             // lookup the child [InodeData] in [self.inode_tracker].
             // We know the inodes for children have already been allocated.
             let child_inode_data = self.inode_tracker.read().get(*child_ino).unwrap();
@@ -400,16 +398,20 @@ where
 
             // This task will run in the background immediately and will exit
             // after the stream ends or if we no longer want any more entries.
-            self.tokio_handle.spawn(async move {
-                let mut stream = root_nodes_provider.list().enumerate();
-                while let Some(node) = stream.next().await {
-                    if tx.send(node).await.is_err() {
-                        // If we get a send error, it means the sync code
-                        // doesn't want any more entries.
-                        break;
+            self.tokio_handle.spawn(
+                async move {
+                    let mut stream = root_nodes_provider.list().enumerate();
+                    while let Some(e) = stream.next().await {
+                        if tx.send(e).await.is_err() {
+                            // If we get a send error, it means the sync code
+                            // doesn't want any more entries.
+                            break;
+                        }
                     }
                 }
-            });
+                // instrument the task with the current span; this is not done by default
+                .in_current_span(),
+            );
 
             // Put the rx part into [self.dir_handles].
             // TODO: this will overflow after 2**64 operations,
@@ -462,12 +464,12 @@ where
                 .map_err(|_| crate::Error::StorageError("mutex poisoned".into()))?;
 
             while let Some((i, n)) = rx.blocking_recv() {
-                let root_node = n.map_err(|e| {
+                let (name, node) = n.map_err(|e| {
                     warn!("failed to retrieve root node: {}", e);
                     io::Error::from_raw_os_error(libc::EIO)
                 })?;
 
-                let (inode_data, name) = InodeData::from_node(root_node);
+                let inode_data = InodeData::from_node(&node);
 
                 // obtain the inode, or allocate a new one.
                 let ino = self.get_inode_for_root_name(&name).unwrap_or_else(|| {
@@ -482,7 +484,7 @@ where
                     ino,
                     offset: offset + (i as u64) + 1,
                     type_: inode_data.as_fuse_type(),
-                    name: &name,
+                    name: name.as_ref(),
                 })?;
                 // If the buffer is full, add_entry will return `Ok(0)`.
                 if written == 0 {
@@ -496,15 +498,17 @@ where
         let (parent_digest, children) = self.get_directory_children(inode)?;
         Span::current().record("directory.digest", parent_digest.to_string());
 
-        for (i, (ino, child_node)) in children.into_iter().skip(offset as usize).enumerate() {
-            let (inode_data, name) = InodeData::from_node(child_node);
+        for (i, (ino, child_name, child_node)) in
+            children.into_iter().skip(offset as usize).enumerate()
+        {
+            let inode_data = InodeData::from_node(&child_node);
 
             // the second parameter will become the "offset" parameter on the next call.
             let written = add_entry(fuse_backend_rs::api::filesystem::DirEntry {
                 ino,
                 offset: offset + (i as u64) + 1,
                 type_: inode_data.as_fuse_type(),
-                name: &name,
+                name: child_name.as_ref(),
             })?;
             // If the buffer is full, add_entry will return `Ok(0)`.
             if written == 0 {
@@ -549,12 +553,12 @@ where
                 .map_err(|_| crate::Error::StorageError("mutex poisoned".into()))?;
 
             while let Some((i, n)) = rx.blocking_recv() {
-                let root_node = n.map_err(|e| {
+                let (name, node) = n.map_err(|e| {
                     warn!("failed to retrieve root node: {}", e);
                     io::Error::from_raw_os_error(libc::EPERM)
                 })?;
 
-                let (inode_data, name) = InodeData::from_node(root_node);
+                let inode_data = InodeData::from_node(&node);
 
                 // obtain the inode, or allocate a new one.
                 let ino = self.get_inode_for_root_name(&name).unwrap_or_else(|| {
@@ -570,7 +574,7 @@ where
                         ino,
                         offset: offset + (i as u64) + 1,
                         type_: inode_data.as_fuse_type(),
-                        name: &name,
+                        name: name.as_ref(),
                     },
                     inode_data.as_fuse_entry(ino),
                 )?;
@@ -586,8 +590,8 @@ where
         let (parent_digest, children) = self.get_directory_children(inode)?;
         Span::current().record("directory.digest", parent_digest.to_string());
 
-        for (i, (ino, child_node)) in children.into_iter().skip(offset as usize).enumerate() {
-            let (inode_data, name) = InodeData::from_node(child_node);
+        for (i, (ino, name, child_node)) in children.into_iter().skip(offset as usize).enumerate() {
+            let inode_data = InodeData::from_node(&child_node);
 
             // the second parameter will become the "offset" parameter on the next call.
             let written = add_entry(
@@ -595,7 +599,7 @@ where
                     ino,
                     offset: offset + (i as u64) + 1,
                     type_: inode_data.as_fuse_type(),
-                    name: &name,
+                    name: name.as_ref(),
                 },
                 inode_data.as_fuse_entry(ino),
             )?;
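
The `.in_current_span()` call added to the spawned listing task attaches the caller's tracing span to it, which `tokio::spawn` does not do by itself. A minimal standalone sketch of the same pattern (names hypothetical):

    use tracing::{info_span, Instrument as _};

    fn spawn_instrumented(handle: &tokio::runtime::Handle) {
        let span = info_span!("list_root_nodes");
        let _guard = span.enter();
        handle.spawn(
            async {
                // Attributed to "list_root_nodes" thanks to in_current_span().
                tracing::debug!("inside the spawned task");
            }
            .in_current_span(),
        );
    }
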
diff --git a/tvix/castore/src/fs/root_nodes.rs b/tvix/castore/src/fs/root_nodes.rs
index 6609e049a1fc..5ed1a4d8d6c0 100644
--- a/tvix/castore/src/fs/root_nodes.rs
+++ b/tvix/castore/src/fs/root_nodes.rs
@@ -1,7 +1,6 @@
 use std::collections::BTreeMap;
 
-use crate::{proto::node::Node, Error};
-use bytes::Bytes;
+use crate::{path::PathComponent, Error, Node};
 use futures::stream::BoxStream;
 use tonic::async_trait;
 
@@ -11,11 +10,12 @@ use tonic::async_trait;
 pub trait RootNodes: Send + Sync {
     /// Looks up a root CA node based on the basename of the node in the root
     /// directory of the filesystem.
-    async fn get_by_basename(&self, name: &[u8]) -> Result<Option<Node>, Error>;
+    async fn get_by_basename(&self, name: &PathComponent) -> Result<Option<Node>, Error>;
 
-    /// Lists all root CA nodes in the filesystem. An error can be returned
-    /// in case listing is not allowed
-    fn list(&self) -> BoxStream<Result<Node, Error>>;
+    /// Lists all root CA nodes in the filesystem, as tuples of (base)name
+    /// and Node.
+    /// An error can be returned in case listing is not allowed.
+    fn list(&self) -> BoxStream<Result<(PathComponent, Node), Error>>;
 }
 
 #[async_trait]
@@ -23,15 +23,17 @@ pub trait RootNodes: Send + Sync {
 /// the key is the node name.
 impl<T> RootNodes for T
 where
-    T: AsRef<BTreeMap<Bytes, Node>> + Send + Sync,
+    T: AsRef<BTreeMap<PathComponent, Node>> + Send + Sync,
 {
-    async fn get_by_basename(&self, name: &[u8]) -> Result<Option<Node>, Error> {
+    async fn get_by_basename(&self, name: &PathComponent) -> Result<Option<Node>, Error> {
         Ok(self.as_ref().get(name).cloned())
     }
 
-    fn list(&self) -> BoxStream<Result<Node, Error>> {
+    fn list(&self) -> BoxStream<Result<(PathComponent, Node), Error>> {
         Box::pin(tokio_stream::iter(
-            self.as_ref().iter().map(|(_, v)| Ok(v.clone())),
+            self.as_ref()
+                .iter()
+                .map(|(name, node)| Ok((name.to_owned(), node.to_owned()))),
         ))
     }
 }
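
A sketch of what the blanket impl buys: an `Arc<BTreeMap<PathComponent, Node>>` is `AsRef<BTreeMap<PathComponent, Node>>` and therefore a ready-made `RootNodes` provider, with the basenames held next to (rather than inside) the nodes.

    use std::collections::BTreeMap;
    use std::sync::Arc;

    use crate::fixtures::DUMMY_DIGEST;
    use crate::{Node, PathComponent};

    fn root_nodes_sketch() -> Arc<BTreeMap<PathComponent, Node>> {
        let mut m = BTreeMap::new();
        m.insert(
            PathComponent::try_from("some-output").expect("valid basename"),
            Node::File {
                digest: DUMMY_DIGEST.clone(),
                size: 42,
                executable: false,
            },
        );
        // get_by_basename now takes a &PathComponent, and list() yields
        // (PathComponent, Node) tuples.
        Arc::new(m)
    }
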
diff --git a/tvix/castore/src/import/archive.rs b/tvix/castore/src/import/archive.rs
index cd5b1290e031..167f799efa0f 100644
--- a/tvix/castore/src/import/archive.rs
+++ b/tvix/castore/src/import/archive.rs
@@ -13,7 +13,7 @@ use tracing::{instrument, warn, Level};
 use crate::blobservice::BlobService;
 use crate::directoryservice::DirectoryService;
 use crate::import::{ingest_entries, IngestionEntry, IngestionError};
-use crate::proto::node::Node;
+use crate::Node;
 
 use super::blobs::{self, ConcurrentBlobUploader};
 
diff --git a/tvix/castore/src/import/blobs.rs b/tvix/castore/src/import/blobs.rs
index 8135d871d6c0..f71ee1e63768 100644
--- a/tvix/castore/src/import/blobs.rs
+++ b/tvix/castore/src/import/blobs.rs
@@ -28,6 +28,9 @@ pub enum Error {
     #[error("unable to read blob contents for {0}: {1}")]
     BlobRead(PathBuf, std::io::Error),
 
+    #[error("unable to check whether blob at {0} already exists: {1}")]
+    BlobCheck(PathBuf, std::io::Error),
+
     // FUTUREWORK: proper error for blob finalize
     #[error("unable to finalize blob {0}: {1}")]
     BlobFinalize(PathBuf, std::io::Error),
@@ -118,6 +121,16 @@ where
                 let path = path.to_owned();
                 let r = Cursor::new(buffer);
                 async move {
+                    // We already know the blob digest; check whether it exists before uploading it.
+                    if blob_service
+                        .has(&expected_digest)
+                        .await
+                        .map_err(|e| Error::BlobCheck(path.clone(), e))?
+                    {
+                        drop(permit);
+                        return Ok(());
+                    }
+
                     let digest = upload_blob(&blob_service, &path, expected_size, r).await?;
 
                     assert_eq!(digest, expected_digest, "Tvix bug: blob digest mismatch");
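
The addition above short-circuits uploads for blobs the service already has. Reduced to its essentials, a sketch against the `BlobService` trait (error plumbing omitted):

    use tvix_castore::blobservice::BlobService;
    use tvix_castore::B3Digest;

    async fn upload_if_missing<BS: BlobService>(
        blob_service: &BS,
        expected_digest: B3Digest,
        mut reader: impl tokio::io::AsyncRead + Unpin,
    ) -> std::io::Result<()> {
        // Cheap existence check first; skip the upload entirely on a hit.
        if blob_service.has(&expected_digest).await? {
            return Ok(());
        }
        let mut writer = blob_service.open_write().await;
        tokio::io::copy(&mut reader, &mut writer).await?;
        let digest = writer.close().await?;
        assert_eq!(digest, expected_digest, "blob digest mismatch");
        Ok(())
    }
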
diff --git a/tvix/castore/src/import/fs.rs b/tvix/castore/src/import/fs.rs
index 9d3ecfe6ab7a..1332fdfe57b5 100644
--- a/tvix/castore/src/import/fs.rs
+++ b/tvix/castore/src/import/fs.rs
@@ -6,14 +6,17 @@ use std::fs::FileType;
 use std::os::unix::ffi::OsStringExt;
 use std::os::unix::fs::MetadataExt;
 use std::os::unix::fs::PermissionsExt;
+use tokio::io::BufReader;
+use tokio_util::io::InspectReader;
 use tracing::instrument;
+use tracing::Span;
+use tracing_indicatif::span_ext::IndicatifSpanExt;
 use walkdir::DirEntry;
 use walkdir::WalkDir;
 
 use crate::blobservice::BlobService;
 use crate::directoryservice::DirectoryService;
-use crate::proto::node::Node;
-use crate::B3Digest;
+use crate::{B3Digest, Node};
 
 use super::ingest_entries;
 use super::IngestionEntry;
@@ -26,7 +29,7 @@ use super::IngestionError;
 ///
 /// This function will walk the filesystem using `walkdir` and will consume
 /// `O(#number of entries)` space.
-#[instrument(skip(blob_service, directory_service), fields(path), err)]
+#[instrument(skip(blob_service, directory_service), fields(path, indicatif.pb_show=1), err)]
 pub async fn ingest_path<BS, DS, P>(
     blob_service: BS,
     directory_service: DS,
@@ -37,6 +40,10 @@ where
     BS: BlobService + Clone,
     DS: DirectoryService,
 {
+    let span = Span::current();
+    span.pb_set_message(&format!("Ingesting {:?}", path));
+    span.pb_start();
+
     let iter = WalkDir::new(path.as_ref())
         .follow_links(false)
         .follow_root_links(false)
@@ -44,7 +51,18 @@ where
         .into_iter();
 
     let entries = dir_entries_to_ingestion_stream(blob_service, iter, path.as_ref());
-    ingest_entries(directory_service, entries).await
+    ingest_entries(
+        directory_service,
+        entries.inspect({
+            let span = span.clone();
+            move |e| {
+                if e.is_ok() {
+                    span.pb_inc(1)
+                }
+            }
+        }),
+    )
+    .await
 }
 
 /// Converts an iterator of [walkdir::DirEntry]s into a stream of ingestion entries.
@@ -138,7 +156,7 @@ where
 }
 
 /// Uploads the file at the provided [Path] to the [BlobService].
-#[instrument(skip(blob_service), fields(path), err)]
+#[instrument(skip(blob_service), fields(path, indicatif.pb_show=1), err)]
 async fn upload_blob<BS>(
     blob_service: BS,
     path: impl AsRef<std::path::Path>,
@@ -146,16 +164,29 @@ async fn upload_blob<BS>(
 where
     BS: BlobService,
 {
-    let mut file = match tokio::fs::File::open(path.as_ref()).await {
-        Ok(file) => file,
-        Err(e) => return Err(Error::BlobRead(path.as_ref().to_path_buf(), e)),
-    };
+    let span = Span::current();
+    span.pb_set_style(&tvix_tracing::PB_TRANSFER_STYLE);
+    span.pb_set_message(&format!("Uploading blob for {:?}", path.as_ref()));
+    span.pb_start();
 
-    let mut writer = blob_service.open_write().await;
+    let file = tokio::fs::File::open(path.as_ref())
+        .await
+        .map_err(|e| Error::BlobRead(path.as_ref().to_path_buf(), e))?;
 
-    if let Err(e) = tokio::io::copy(&mut file, &mut writer).await {
-        return Err(Error::BlobRead(path.as_ref().to_path_buf(), e));
-    };
+    let metadata = file
+        .metadata()
+        .await
+        .map_err(|e| Error::Stat(path.as_ref().to_path_buf(), e))?;
+
+    span.pb_set_length(metadata.len());
+    let reader = InspectReader::new(file, |d| {
+        span.pb_inc(d.len() as u64);
+    });
+
+    let mut writer = blob_service.open_write().await;
+    tokio::io::copy(&mut BufReader::new(reader), &mut writer)
+        .await
+        .map_err(|e| Error::BlobRead(path.as_ref().to_path_buf(), e))?;
 
     let digest = writer
         .close()
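
The entry point keeps its shape; a usage sketch with the in-memory services (the `Arc<dyn …>` handles satisfy the service bounds the same way they do in the tests in this series):

    use std::sync::Arc;

    use tvix_castore::blobservice::{BlobService, MemoryBlobService};
    use tvix_castore::directoryservice::{DirectoryService, MemoryDirectoryService};
    use tvix_castore::import::fs::ingest_path;

    async fn ingest_sketch() {
        let blob_service: Arc<dyn BlobService> = Arc::new(MemoryBlobService::default());
        let directory_service: Arc<dyn DirectoryService> =
            Arc::new(MemoryDirectoryService::default());

        // Walks the path, uploads blobs and directories, and returns the root
        // Node; after this series the node carries no name of its own.
        let root_node = ingest_path(blob_service, directory_service, "/some/path")
            .await
            .expect("ingestion must succeed");
        let _ = root_node;
    }
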
diff --git a/tvix/castore/src/import/mod.rs b/tvix/castore/src/import/mod.rs
index 4223fe538756..6e10a64939a4 100644
--- a/tvix/castore/src/import/mod.rs
+++ b/tvix/castore/src/import/mod.rs
@@ -4,17 +4,10 @@
 //! Specific implementations, such as ingesting from the filesystem, live in
 //! child modules.
 
-use crate::directoryservice::DirectoryPutter;
-use crate::directoryservice::DirectoryService;
+use crate::directoryservice::{DirectoryPutter, DirectoryService};
 use crate::path::{Path, PathBuf};
-use crate::proto::node::Node;
-use crate::proto::Directory;
-use crate::proto::DirectoryNode;
-use crate::proto::FileNode;
-use crate::proto::SymlinkNode;
-use crate::B3Digest;
+use crate::{B3Digest, Directory, Node, SymlinkTargetError};
 use futures::{Stream, StreamExt};
-
 use tracing::Level;
 
 use std::collections::HashMap;
@@ -66,14 +59,6 @@ where
             // we break the loop manually.
             .expect("Tvix bug: unexpected end of stream")?;
 
-        let name = entry
-            .path()
-            .file_name()
-            // If this is the root node, it will have an empty name.
-            .unwrap_or_default()
-            .to_owned()
-            .into();
-
         let node = match &mut entry {
             IngestionEntry::Dir { .. } => {
                 // If the entry is a directory, we traversed all its children (and
@@ -99,27 +84,31 @@ where
                         IngestionError::UploadDirectoryError(entry.path().to_owned(), e)
                     })?;
 
-                Node::Directory(DirectoryNode {
-                    name,
-                    digest: directory_digest.into(),
+                Node::Directory {
+                    digest: directory_digest,
                     size: directory_size,
-                })
+                }
             }
-            IngestionEntry::Symlink { ref target, .. } => Node::Symlink(SymlinkNode {
-                name,
-                target: target.to_owned().into(),
-            }),
+            IngestionEntry::Symlink { ref target, .. } => Node::Symlink {
+                target: bytes::Bytes::copy_from_slice(target).try_into().map_err(
+                    |e: SymlinkTargetError| {
+                        IngestionError::UploadDirectoryError(
+                            entry.path().to_owned(),
+                            crate::Error::StorageError(format!("invalid symlink target: {}", e)),
+                        )
+                    },
+                )?,
+            },
             IngestionEntry::Regular {
                 size,
                 executable,
                 digest,
                 ..
-            } => Node::File(FileNode {
-                name,
-                digest: digest.to_owned().into(),
+            } => Node::File {
+                digest: digest.clone(),
                 size: *size,
                 executable: *executable,
-            }),
+            },
         };
 
         let parent = entry
@@ -130,8 +119,24 @@ where
         if parent == crate::Path::ROOT {
             break node;
         } else {
+            let name = entry
+                .path()
+                .file_name()
+                // If this is the root node, it will have an empty name.
+                .unwrap_or_else(|| "".try_into().unwrap())
+                .to_owned();
+
             // record node in parent directory, creating a new [Directory] if not there yet.
-            directories.entry(parent.to_owned()).or_default().add(node);
+            directories
+                .entry(parent.to_owned())
+                .or_default()
+                .add(name, node)
+                .map_err(|e| {
+                    IngestionError::UploadDirectoryError(
+                        entry.path().to_owned(),
+                        crate::Error::StorageError(e.to_string()),
+                    )
+                })?;
         }
     };
 
@@ -156,15 +161,8 @@ where
 
         #[cfg(debug_assertions)]
         {
-            if let Node::Directory(directory_node) = &root_node {
-                debug_assert_eq!(
-                    root_directory_digest,
-                    directory_node
-                        .digest
-                        .to_vec()
-                        .try_into()
-                        .expect("invalid digest len")
-                )
+            if let Node::Directory { digest, .. } = &root_node {
+                debug_assert_eq!(&root_directory_digest, digest);
             } else {
                 unreachable!("Tvix bug: directory putter initialized but no root directory node");
             }
@@ -210,9 +208,8 @@ mod test {
     use rstest::rstest;
 
     use crate::fixtures::{DIRECTORY_COMPLICATED, DIRECTORY_WITH_KEEP, EMPTY_BLOB_DIGEST};
-    use crate::proto::node::Node;
-    use crate::proto::{Directory, DirectoryNode, FileNode, SymlinkNode};
     use crate::{directoryservice::MemoryDirectoryService, fixtures::DUMMY_DIGEST};
+    use crate::{Directory, Node};
 
     use super::ingest_entries;
     use super::IngestionEntry;
@@ -224,18 +221,18 @@ mod test {
         executable: true,
         digest: DUMMY_DIGEST.clone(),
     }],
-        Node::File(FileNode { name: "foo".into(), digest: DUMMY_DIGEST.clone().into(), size: 42, executable: true }
-    ))]
+        Node::File{digest: DUMMY_DIGEST.clone(), size: 42, executable: true}
+    )]
     #[case::single_symlink(vec![IngestionEntry::Symlink {
         path: "foo".parse().unwrap(),
         target: b"blub".into(),
     }],
-        Node::Symlink(SymlinkNode { name: "foo".into(), target: "blub".into()})
+        Node::Symlink{target: "blub".try_into().unwrap()}
     )]
     #[case::single_dir(vec![IngestionEntry::Dir {
         path: "foo".parse().unwrap(),
     }],
-        Node::Directory(DirectoryNode { name: "foo".into(), digest: Directory::default().digest().into(), size: Directory::default().size()})
+        Node::Directory{digest: Directory::default().digest(), size: Directory::default().size()}
     )]
     #[case::dir_with_keep(vec![
         IngestionEntry::Regular {
@@ -248,7 +245,7 @@ mod test {
             path: "foo".parse().unwrap(),
         },
     ],
-        Node::Directory(DirectoryNode { name: "foo".into(), digest: DIRECTORY_WITH_KEEP.digest().into(), size: DIRECTORY_WITH_KEEP.size() })
+        Node::Directory{ digest: DIRECTORY_WITH_KEEP.digest(), size: DIRECTORY_WITH_KEEP.size()}
     )]
     /// This is intentionally a bit unsorted, though it still satisfies all
     /// requirements we have on the order of elements in the stream.
@@ -276,7 +273,7 @@ mod test {
             path: "blub".parse().unwrap(),
         },
     ],
-        Node::Directory(DirectoryNode { name: "blub".into(), digest: DIRECTORY_COMPLICATED.digest().into(), size:DIRECTORY_COMPLICATED.size() })
+    Node::Directory{ digest: DIRECTORY_COMPLICATED.digest(), size: DIRECTORY_COMPLICATED.size() }
     )]
     #[tokio::test]
     async fn test_ingestion(#[case] entries: Vec<IngestionEntry>, #[case] exp_root_node: Node) {
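
A test-style sketch of driving `ingest_entries` by hand, mirroring the cases above: children precede their parents in the stream, and the entry whose parent is the root terminates it.

    use futures::stream;

    use crate::directoryservice::MemoryDirectoryService;
    use crate::fixtures::DUMMY_DIGEST;
    use crate::import::{ingest_entries, IngestionEntry};
    use crate::Node;

    #[tokio::test]
    async fn ingest_dir_with_file() {
        let directory_service = MemoryDirectoryService::default();

        let entries = stream::iter(
            vec![
                IngestionEntry::Regular {
                    path: "dir/file".parse().unwrap(),
                    size: 42,
                    executable: false,
                    digest: DUMMY_DIGEST.clone(),
                },
                IngestionEntry::Dir {
                    path: "dir".parse().unwrap(),
                },
            ]
            .into_iter()
            .map(Ok::<_, std::io::Error>),
        );

        let root_node = ingest_entries(directory_service, entries)
            .await
            .expect("must succeed");

        // The returned root node is nameless; "dir" was only used to place it
        // while building the parent Directory.
        assert!(matches!(root_node, Node::Directory { .. }));
    }
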
diff --git a/tvix/castore/src/lib.rs b/tvix/castore/src/lib.rs
index bdc533a8c5e6..8ac6ca3dd66a 100644
--- a/tvix/castore/src/lib.rs
+++ b/tvix/castore/src/lib.rs
@@ -3,21 +3,25 @@ mod errors;
 mod hashing_reader;
 
 pub mod blobservice;
+pub mod composition;
 pub mod directoryservice;
 pub mod fixtures;
 
 #[cfg(feature = "fs")]
 pub mod fs;
 
+mod nodes;
+pub use nodes::*;
+
 mod path;
-pub use path::{Path, PathBuf};
+pub use path::{Path, PathBuf, PathComponent, PathComponentError};
 
 pub mod import;
 pub mod proto;
 pub mod tonic;
 
 pub use digests::{B3Digest, B3_LEN};
-pub use errors::Error;
+pub use errors::{DirectoryError, Error, ValidateNodeError};
 pub use hashing_reader::{B3HashingReader, HashingReader};
 
 #[cfg(test)]
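
For downstream crates, the visible effect of the re-exports is that node and path types move from the proto module to the crate root. A sketch of the new import surface:

    // Previously: use tvix_castore::proto::node::Node;
    use tvix_castore::{B3Digest, Directory, Node, PathComponent, SymlinkTarget};
    // Error types introduced or re-exported alongside:
    use tvix_castore::{DirectoryError, PathComponentError, SymlinkTargetError, ValidateNodeError};
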
diff --git a/tvix/castore/src/nodes/directory.rs b/tvix/castore/src/nodes/directory.rs
new file mode 100644
index 000000000000..f80e055dde80
--- /dev/null
+++ b/tvix/castore/src/nodes/directory.rs
@@ -0,0 +1,287 @@
+use std::collections::btree_map::{self, BTreeMap};
+
+use crate::{errors::DirectoryError, path::PathComponent, proto, B3Digest, Node};
+
+/// A Directory contains nodes, which can be Directory, File or Symlink nodes.
+/// It attaches names to these nodes, which are their basenames in that directory.
+/// These names:
+///  - MUST not contain slashes or null bytes
+///  - MUST not be '.' or '..'
+///  - MUST be unique within the directory
+#[derive(Default, Debug, Clone, PartialEq, Eq)]
+pub struct Directory {
+    nodes: BTreeMap<PathComponent, Node>,
+}
+
+impl Directory {
+    /// Constructs a new, empty Directory.
+    pub fn new() -> Self {
+        Directory {
+            nodes: BTreeMap::new(),
+        }
+    }
+
+    /// Construct a [Directory] from tuples of name and [Node].
+    ///
+    /// Inserting multiple elements with the same name yields an error, as
+    /// does exceeding the maximum directory size.
+    pub fn try_from_iter<T: IntoIterator<Item = (PathComponent, Node)>>(
+        iter: T,
+    ) -> Result<Directory, DirectoryError> {
+        let mut nodes = BTreeMap::new();
+
+        iter.into_iter().try_fold(0u64, |size, (name, node)| {
+            check_insert_node(size, &mut nodes, name, node)
+        })?;
+
+        Ok(Self { nodes })
+    }
+
+    /// The size of a directory is the number of all regular, symlink and
+    /// directory elements, plus the sum of its directory elements' size fields.
+    pub fn size(&self) -> u64 {
+        // It's impossible to create a Directory where the size overflows, because we
+        // check before every add() that the size won't overflow.
+        (self.nodes.len() as u64)
+            + self
+                .nodes()
+                .map(|(_name, n)| match n {
+                    Node::Directory { size, .. } => *size,
+                    Node::File { .. } | Node::Symlink { .. } => 0,
+                })
+                .sum::<u64>()
+    }
+
+    /// Calculates the digest of a Directory, which is the blake3 hash of a
+    /// Directory protobuf message, serialized in protobuf canonical form.
+    pub fn digest(&self) -> B3Digest {
+        proto::Directory::from(self.clone()).digest()
+    }
+
+    /// Allows iterating over all nodes (directories, files and symlinks)
+    /// For each, it returns a tuple of its name and node.
+    /// The elements are sorted by their names.
+    pub fn nodes(&self) -> impl Iterator<Item = (&PathComponent, &Node)> + Send + Sync + '_ {
+        self.nodes.iter()
+    }
+
+    /// Dissolves a Directory into its individual names and nodes.
+    /// The elements are sorted by their names.
+    pub fn into_nodes(self) -> impl Iterator<Item = (PathComponent, Node)> + Send + Sync {
+        self.nodes.into_iter()
+    }
+
+    /// Adds the specified [Node] to the [Directory] with a given name.
+    ///
+    /// Inserting a node whose name already exists in the directory yields an
+    /// error, as does exceeding the maximum directory size.
+    ///
+    /// In case you want to construct a [Directory] from multiple elements, use
+    /// [Directory::try_from_iter] instead.
+    pub fn add(&mut self, name: PathComponent, node: Node) -> Result<(), DirectoryError> {
+        check_insert_node(self.size(), &mut self.nodes, name, node)?;
+        Ok(())
+    }
+}
+
+fn checked_sum(iter: impl IntoIterator<Item = u64>) -> Option<u64> {
+    iter.into_iter().try_fold(0u64, |acc, i| acc.checked_add(i))
+}
+
+/// Helper function dealing with inserting nodes into the nodes [BTreeMap],
+/// after ensuring the new size doesn't overflow and the key doesn't already exist.
+///
+/// Returns the new total size, or an error.
+fn check_insert_node(
+    current_size: u64,
+    nodes: &mut BTreeMap<PathComponent, Node>,
+    name: PathComponent,
+    node: Node,
+) -> Result<u64, DirectoryError> {
+    // Check that even after adding this new directory entry, the size calculation will not
+    // overflow
+    let new_size = checked_sum([
+        current_size,
+        1,
+        match node {
+            Node::Directory { size, .. } => size,
+            _ => 0,
+        },
+    ])
+    .ok_or(DirectoryError::SizeOverflow)?;
+
+    match nodes.entry(name) {
+        btree_map::Entry::Vacant(e) => {
+            e.insert(node);
+        }
+        btree_map::Entry::Occupied(occupied) => {
+            return Err(DirectoryError::DuplicateName(occupied.key().to_owned()))
+        }
+    }
+
+    Ok(new_size)
+}
+
+#[cfg(test)]
+mod test {
+    use super::{Directory, Node};
+    use crate::fixtures::DUMMY_DIGEST;
+    use crate::{DirectoryError, PathComponent};
+
+    #[test]
+    fn from_iter_single() {
+        Directory::try_from_iter([(
+            PathComponent::try_from("b").unwrap(),
+            Node::Directory {
+                digest: DUMMY_DIGEST.clone(),
+                size: 1,
+            },
+        )])
+        .unwrap();
+    }
+
+    #[test]
+    fn from_iter_multiple() {
+        let d = Directory::try_from_iter([
+            (
+                "b".try_into().unwrap(),
+                Node::Directory {
+                    digest: DUMMY_DIGEST.clone(),
+                    size: 1,
+                },
+            ),
+            (
+                "a".try_into().unwrap(),
+                Node::Directory {
+                    digest: DUMMY_DIGEST.clone(),
+                    size: 1,
+                },
+            ),
+            (
+                "z".try_into().unwrap(),
+                Node::Directory {
+                    digest: DUMMY_DIGEST.clone(),
+                    size: 1,
+                },
+            ),
+            (
+                "f".try_into().unwrap(),
+                Node::File {
+                    digest: DUMMY_DIGEST.clone(),
+                    size: 1,
+                    executable: true,
+                },
+            ),
+            (
+                "c".try_into().unwrap(),
+                Node::File {
+                    digest: DUMMY_DIGEST.clone(),
+                    size: 1,
+                    executable: true,
+                },
+            ),
+            (
+                "g".try_into().unwrap(),
+                Node::File {
+                    digest: DUMMY_DIGEST.clone(),
+                    size: 1,
+                    executable: true,
+                },
+            ),
+            (
+                "t".try_into().unwrap(),
+                Node::Symlink {
+                    target: "a".try_into().unwrap(),
+                },
+            ),
+            (
+                "o".try_into().unwrap(),
+                Node::Symlink {
+                    target: "a".try_into().unwrap(),
+                },
+            ),
+            (
+                "e".try_into().unwrap(),
+                Node::Symlink {
+                    target: "a".try_into().unwrap(),
+                },
+            ),
+        ])
+        .unwrap();
+
+        // Convert to proto struct and back to ensure we are not generating any invalid structures
+        crate::Directory::try_from(crate::proto::Directory::from(d))
+            .expect("directory should be valid");
+    }
+
+    #[test]
+    fn add_nodes_to_directory() {
+        let mut d = Directory::new();
+
+        d.add(
+            "b".try_into().unwrap(),
+            Node::Directory {
+                digest: DUMMY_DIGEST.clone(),
+                size: 1,
+            },
+        )
+        .unwrap();
+        d.add(
+            "a".try_into().unwrap(),
+            Node::Directory {
+                digest: DUMMY_DIGEST.clone(),
+                size: 1,
+            },
+        )
+        .unwrap();
+
+        // Convert to proto struct and back to ensure we are not generating any invalid structures
+        crate::Directory::try_from(crate::proto::Directory::from(d))
+            .expect("directory should be valid");
+    }
+
+    #[test]
+    fn validate_overflow() {
+        let mut d = Directory::new();
+
+        assert_eq!(
+            d.add(
+                "foo".try_into().unwrap(),
+                Node::Directory {
+                    digest: DUMMY_DIGEST.clone(),
+                    size: u64::MAX
+                }
+            ),
+            Err(DirectoryError::SizeOverflow)
+        );
+    }
+
+    #[test]
+    fn add_duplicate_node_to_directory() {
+        let mut d = Directory::new();
+
+        d.add(
+            "a".try_into().unwrap(),
+            Node::Directory {
+                digest: DUMMY_DIGEST.clone(),
+                size: 1,
+            },
+        )
+        .unwrap();
+        assert_eq!(
+            format!(
+                "{}",
+                d.add(
+                    "a".try_into().unwrap(),
+                    Node::File {
+                        digest: DUMMY_DIGEST.clone(),
+                        size: 1,
+                        executable: true
+                    }
+                )
+                .expect_err("adding duplicate dir entry must fail")
+            ),
+            "\"a\" is a duplicate name"
+        );
+    }
+}
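
A short sketch of the size semantics, using the crate's DUMMY_DIGEST fixture: each direct entry counts once, and subdirectories additionally contribute their own (claimed) transitive sizes.

    use tvix_castore::fixtures::DUMMY_DIGEST;
    use tvix_castore::{Directory, DirectoryError, Node};

    fn size_sketch() -> Result<(), DirectoryError> {
        let mut d = Directory::new();
        d.add(
            "file".try_into().unwrap(),
            Node::File {
                digest: DUMMY_DIGEST.clone(),
                size: 100, // blob size, not counted into the directory size
                executable: false,
            },
        )?;
        d.add(
            "sub".try_into().unwrap(),
            Node::Directory {
                digest: DUMMY_DIGEST.clone(),
                size: 3, // claims 3 transitive entries
            },
        )?;
        // Two direct entries plus the 3 entries claimed below "sub".
        assert_eq!(5, d.size());
        Ok(())
    }
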
diff --git a/tvix/castore/src/nodes/mod.rs b/tvix/castore/src/nodes/mod.rs
new file mode 100644
index 000000000000..ac7aa1e666df
--- /dev/null
+++ b/tvix/castore/src/nodes/mod.rs
@@ -0,0 +1,48 @@
+//! This holds types describing nodes in the tvix-castore model.
+mod directory;
+mod symlink_target;
+
+use crate::B3Digest;
+pub use directory::Directory;
+pub use symlink_target::{SymlinkTarget, SymlinkTargetError};
+
+/// A Node is either a Directory, File or Symlink node.
+/// Nodes themselves don't have names; they get a name either by being inside
+/// a [Directory], or by being a root node with its own name attached to it.
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum Node {
+    /// A DirectoryNode is a pointer to a [Directory], by its [Directory::digest].
+    /// It also records a `size`.
+    /// Such a node is either an element of the [Directory] containing it,
+    /// or a standalone root node.
+    Directory {
+        /// The blake3 hash of a Directory message, serialized in protobuf canonical form.
+        digest: B3Digest,
+        /// Number of child elements in the Directory referred to by `digest`.
+        /// Calculated by summing up the numbers of nodes, and for each directory,
+        /// its size field. Can be used for inode allocation.
+        /// This field is precisely as verifiable as any other Merkle tree edge.
+        /// Resolve `digest`, and you can compute it incrementally. Resolve the entire
+        /// tree, and you can fully compute it from scratch.
+        /// A credulous implementation won't reject an excessive size, but this is
+        /// harmless: you'll have some ordinals without nodes. Undersizing is obvious
+        /// and easy to reject: you won't have an ordinal for some nodes.
+        size: u64,
+    },
+    /// A FileNode represents a regular or executable file in a Directory or at the root.
+    File {
+        /// The blake3 digest of the file contents
+        digest: B3Digest,
+
+        /// The file content size
+        size: u64,
+
+        /// Whether the file is executable
+        executable: bool,
+    },
+    /// A SymlinkNode represents a symbolic link in a Directory or at the root.
+    Symlink {
+        /// The target of the symlink.
+        target: SymlinkTarget,
+    },
+}
diff --git a/tvix/castore/src/nodes/symlink_target.rs b/tvix/castore/src/nodes/symlink_target.rs
new file mode 100644
index 000000000000..e9a1a0bd05c2
--- /dev/null
+++ b/tvix/castore/src/nodes/symlink_target.rs
@@ -0,0 +1,223 @@
+use bstr::ByteSlice;
+use std::fmt::{self, Debug, Display};
+
+/// A wrapper type for symlink targets.
+/// Internally uses a [bytes::Bytes], but disallows empty targets, overly long
+/// ones, and those containing null bytes.
+#[repr(transparent)]
+#[derive(Clone, PartialEq, Eq)]
+pub struct SymlinkTarget {
+    inner: bytes::Bytes,
+}
+
+/// The maximum length a symlink target can have.
+/// Linux allows 4095 bytes here.
+pub const MAX_TARGET_LEN: usize = 4095;
+
+impl AsRef<[u8]> for SymlinkTarget {
+    fn as_ref(&self) -> &[u8] {
+        self.inner.as_ref()
+    }
+}
+
+impl From<SymlinkTarget> for bytes::Bytes {
+    fn from(value: SymlinkTarget) -> Self {
+        value.inner
+    }
+}
+
+fn validate_symlink_target<B: AsRef<[u8]>>(symlink_target: B) -> Result<B, SymlinkTargetError> {
+    let v = symlink_target.as_ref();
+
+    if v.is_empty() {
+        return Err(SymlinkTargetError::Empty);
+    }
+    if v.len() > MAX_TARGET_LEN {
+        return Err(SymlinkTargetError::TooLong);
+    }
+    if v.contains(&0x00) {
+        return Err(SymlinkTargetError::Null);
+    }
+
+    Ok(symlink_target)
+}
+
+impl TryFrom<bytes::Bytes> for SymlinkTarget {
+    type Error = SymlinkTargetError;
+
+    fn try_from(value: bytes::Bytes) -> Result<Self, Self::Error> {
+        if let Err(e) = validate_symlink_target(&value) {
+            return Err(SymlinkTargetError::Convert(value, Box::new(e)));
+        }
+
+        Ok(Self { inner: value })
+    }
+}
+
+impl TryFrom<&'static [u8]> for SymlinkTarget {
+    type Error = SymlinkTargetError;
+
+    fn try_from(value: &'static [u8]) -> Result<Self, Self::Error> {
+        if let Err(e) = validate_symlink_target(&value) {
+            return Err(SymlinkTargetError::Convert(value.into(), Box::new(e)));
+        }
+
+        Ok(Self {
+            inner: bytes::Bytes::from_static(value),
+        })
+    }
+}
+
+impl TryFrom<&str> for SymlinkTarget {
+    type Error = SymlinkTargetError;
+
+    fn try_from(value: &str) -> Result<Self, Self::Error> {
+        if let Err(e) = validate_symlink_target(value) {
+            return Err(SymlinkTargetError::Convert(
+                value.to_owned().into(),
+                Box::new(e),
+            ));
+        }
+
+        Ok(Self {
+            inner: bytes::Bytes::copy_from_slice(value.as_bytes()),
+        })
+    }
+}
+
+impl Debug for SymlinkTarget {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        Debug::fmt(self.inner.as_bstr(), f)
+    }
+}
+
+impl Display for SymlinkTarget {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        Display::fmt(self.inner.as_bstr(), f)
+    }
+}
+
+/// Errors created when constructing / converting to [SymlinkTarget].
+#[derive(Debug, PartialEq, Eq, thiserror::Error)]
+#[cfg_attr(test, derive(Clone))]
+pub enum SymlinkTargetError {
+    #[error("cannot be empty")]
+    Empty,
+    #[error("cannot contain null bytes")]
+    Null,
+    #[error("cannot be over {} bytes long", MAX_TARGET_LEN)]
+    TooLong,
+    #[error("unable to convert '{:?}", .0.as_bstr())]
+    Convert(bytes::Bytes, Box<Self>),
+}
+
+#[cfg(test)]
+mod tests {
+    use bytes::Bytes;
+    use rstest::rstest;
+
+    use super::validate_symlink_target;
+    use super::{SymlinkTarget, SymlinkTargetError};
+
+    #[rstest]
+    #[case::empty(b"", SymlinkTargetError::Empty)]
+    #[case::null(b"foo\0", SymlinkTargetError::Null)]
+    fn errors(#[case] v: &'static [u8], #[case] err: SymlinkTargetError) {
+        {
+            assert_eq!(
+                Err(err.clone()),
+                validate_symlink_target(v),
+                "validate_symlink_target must fail as expected"
+            );
+        }
+
+        let exp_err_v = Bytes::from_static(v);
+
+        // Bytes
+        {
+            let v = Bytes::from_static(v);
+            assert_eq!(
+                Err(SymlinkTargetError::Convert(
+                    exp_err_v.clone(),
+                    Box::new(err.clone())
+                )),
+                SymlinkTarget::try_from(v),
+                "conversion must fail as expected"
+            );
+        }
+        // &[u8]
+        {
+            assert_eq!(
+                Err(SymlinkTargetError::Convert(
+                    exp_err_v.clone(),
+                    Box::new(err.clone())
+                )),
+                SymlinkTarget::try_from(v),
+                "conversion must fail as expected"
+            );
+        }
+        // &str, if this is valid UTF-8
+        {
+            if let Ok(v) = std::str::from_utf8(v) {
+                assert_eq!(
+                    Err(SymlinkTargetError::Convert(
+                        exp_err_v.clone(),
+                        Box::new(err.clone())
+                    )),
+                    SymlinkTarget::try_from(v),
+                    "conversion must fail as expected"
+                );
+            }
+        }
+    }
+
+    #[test]
+    fn error_toolong() {
+        assert_eq!(
+            Err(SymlinkTargetError::TooLong),
+            validate_symlink_target("X".repeat(5000).into_bytes().as_slice())
+        )
+    }
+
+    #[rstest]
+    #[case::boring(b"aa")]
+    #[case::dot(b".")]
+    #[case::dotsandslashes(b"./..")]
+    #[case::dotdot(b"..")]
+    #[case::slashes(b"a/b")]
+    #[case::slashes_and_absolute(b"/a/b")]
+    #[case::invalid_utf8(b"\xc5\xc4\xd6")]
+    fn success(#[case] v: &'static [u8]) {
+        let exp = SymlinkTarget { inner: v.into() };
+
+        // Bytes
+        {
+            let v: Bytes = v.into();
+            assert_eq!(
+                Ok(exp.clone()),
+                SymlinkTarget::try_from(v),
+                "conversion must succeed"
+            )
+        }
+
+        // &[u8]
+        {
+            assert_eq!(
+                Ok(exp.clone()),
+                SymlinkTarget::try_from(v),
+                "conversion must succeed"
+            )
+        }
+
+        // &str, if this is valid UTF-8
+        {
+            if let Ok(v) = std::str::from_utf8(v) {
+                assert_eq!(
+                    Ok(exp.clone()),
+                    SymlinkTarget::try_from(v),
+                    "conversion must succeed"
+                )
+            }
+        }
+    }
+}
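A short usage sketch for the type above, assuming it is re-exported from the crate root as the `pub use` in nodes/mod.rs suggests:

use tvix_castore::{SymlinkTarget, SymlinkTargetError};

fn main() {
    // Relative, absolute, and even non-UTF-8 targets are all accepted.
    let t = SymlinkTarget::try_from("../bin/sh").expect("valid target");
    assert_eq!(b"../bin/sh", t.as_ref());

    // Invalid targets yield a Convert error wrapping the underlying cause.
    match SymlinkTarget::try_from("") {
        Err(SymlinkTargetError::Convert(_, cause)) => {
            assert_eq!(SymlinkTargetError::Empty, *cause)
        }
        other => panic!("unexpected: {:?}", other),
    }
}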
diff --git a/tvix/castore/src/path/component.rs b/tvix/castore/src/path/component.rs
new file mode 100644
index 000000000000..78aca03c50fe
--- /dev/null
+++ b/tvix/castore/src/path/component.rs
@@ -0,0 +1,268 @@
+use bstr::ByteSlice;
+use std::fmt::{self, Debug, Display};
+
+/// A wrapper type for validated path components in the castore model.
+/// Internally uses a [bytes::Bytes], but disallows
+/// slashes, and null bytes to be present, as well as
+/// '.', '..' and the empty string.
+/// It also rejects components that are too long (> 255 bytes).
+#[repr(transparent)]
+#[derive(Clone, Hash, PartialEq, Eq, PartialOrd, Ord)]
+pub struct PathComponent {
+    pub(super) inner: bytes::Bytes,
+}
+
+/// The maximum length an individual path component can have.
+/// Linux allows 255 bytes of actual name, so we pick that.
+pub const MAX_NAME_LEN: usize = 255;
+
+impl AsRef<[u8]> for PathComponent {
+    fn as_ref(&self) -> &[u8] {
+        self.inner.as_ref()
+    }
+}
+
+impl From<PathComponent> for bytes::Bytes {
+    fn from(value: PathComponent) -> Self {
+        value.inner
+    }
+}
+
+pub(super) fn validate_name<B: AsRef<[u8]>>(name: B) -> Result<(), PathComponentError> {
+    match name.as_ref() {
+        b"" => Err(PathComponentError::Empty),
+        b".." => Err(PathComponentError::Parent),
+        b"." => Err(PathComponentError::CurDir),
+        v if v.len() > MAX_NAME_LEN => Err(PathComponentError::TooLong),
+        v if v.contains(&0x00) => Err(PathComponentError::Null),
+        v if v.contains(&b'/') => Err(PathComponentError::Slashes),
+        _ => Ok(()),
+    }
+}
+
+impl TryFrom<bytes::Bytes> for PathComponent {
+    type Error = PathComponentError;
+
+    fn try_from(value: bytes::Bytes) -> Result<Self, Self::Error> {
+        if let Err(e) = validate_name(&value) {
+            return Err(PathComponentError::Convert(value, Box::new(e)));
+        }
+
+        Ok(Self { inner: value })
+    }
+}
+
+impl TryFrom<&'static [u8]> for PathComponent {
+    type Error = PathComponentError;
+
+    fn try_from(value: &'static [u8]) -> Result<Self, Self::Error> {
+        if let Err(e) = validate_name(value) {
+            return Err(PathComponentError::Convert(value.into(), Box::new(e)));
+        }
+
+        Ok(Self {
+            inner: bytes::Bytes::from_static(value),
+        })
+    }
+}
+
+impl TryFrom<&str> for PathComponent {
+    type Error = PathComponentError;
+
+    fn try_from(value: &str) -> Result<Self, Self::Error> {
+        if let Err(e) = validate_name(value) {
+            return Err(PathComponentError::Convert(
+                value.to_owned().into(),
+                Box::new(e),
+            ));
+        }
+
+        Ok(Self {
+            inner: bytes::Bytes::copy_from_slice(value.as_bytes()),
+        })
+    }
+}
+
+impl TryFrom<&std::ffi::CStr> for PathComponent {
+    type Error = PathComponentError;
+
+    fn try_from(value: &std::ffi::CStr) -> Result<Self, Self::Error> {
+        let value = value.to_bytes();
+        if let Err(e) = validate_name(value) {
+            return Err(PathComponentError::Convert(
+                value.to_owned().into(),
+                Box::new(e),
+            ));
+        }
+
+        Ok(Self {
+            inner: bytes::Bytes::copy_from_slice(value),
+        })
+    }
+}
+
+impl Debug for PathComponent {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        Debug::fmt(self.inner.as_bstr(), f)
+    }
+}
+
+impl Display for PathComponent {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        Display::fmt(self.inner.as_bstr(), f)
+    }
+}
+
+/// Errors created when parsing / validating [PathComponent].
+#[derive(Debug, PartialEq, thiserror::Error)]
+#[cfg_attr(test, derive(Clone))]
+pub enum PathComponentError {
+    #[error("cannot be empty")]
+    Empty,
+    #[error("cannot contain null bytes")]
+    Null,
+    #[error("cannot be '.'")]
+    CurDir,
+    #[error("cannot be '..'")]
+    Parent,
+    #[error("cannot contain slashes")]
+    Slashes,
+    #[error("cannot be over {} bytes long", MAX_NAME_LEN)]
+    TooLong,
+    #[error("unable to convert '{:?}'", .0.as_bstr())]
+    Convert(bytes::Bytes, #[source] Box<Self>),
+}
+
+#[cfg(test)]
+mod tests {
+    use std::ffi::CString;
+
+    use bytes::Bytes;
+    use rstest::rstest;
+
+    use super::{validate_name, PathComponent, PathComponentError};
+
+    #[rstest]
+    #[case::empty(b"", PathComponentError::Empty)]
+    #[case::null(b"foo\0", PathComponentError::Null)]
+    #[case::curdir(b".", PathComponentError::CurDir)]
+    #[case::parent(b"..", PathComponentError::Parent)]
+    #[case::slashes1(b"a/b", PathComponentError::Slashes)]
+    #[case::slashes2(b"/", PathComponentError::Slashes)]
+    fn errors(#[case] v: &'static [u8], #[case] err: PathComponentError) {
+        {
+            assert_eq!(
+                Err(err.clone()),
+                validate_name(v),
+                "validate_name must fail as expected"
+            );
+        }
+
+        let exp_err_v = Bytes::from_static(v);
+
+        // Bytes
+        {
+            let v = Bytes::from_static(v);
+            assert_eq!(
+                Err(PathComponentError::Convert(
+                    exp_err_v.clone(),
+                    Box::new(err.clone())
+                )),
+                PathComponent::try_from(v),
+                "conversion must fail as expected"
+            );
+        }
+        // &[u8]
+        {
+            assert_eq!(
+                Err(PathComponentError::Convert(
+                    exp_err_v.clone(),
+                    Box::new(err.clone())
+                )),
+                PathComponent::try_from(v),
+                "conversion must fail as expected"
+            );
+        }
+        // &str, if it is valid UTF-8
+        {
+            if let Ok(v) = std::str::from_utf8(v) {
+                assert_eq!(
+                    Err(PathComponentError::Convert(
+                        exp_err_v.clone(),
+                        Box::new(err.clone())
+                    )),
+                    PathComponent::try_from(v),
+                    "conversion must fail as expected"
+                );
+            }
+        }
+        // &CStr, if it can be constructed (fails if the payload contains null bytes)
+        {
+            if let Ok(v) = CString::new(v) {
+                let v = v.as_ref();
+                assert_eq!(
+                    Err(PathComponentError::Convert(
+                        exp_err_v.clone(),
+                        Box::new(err.clone())
+                    )),
+                    PathComponent::try_from(v),
+                    "conversion must fail as expected"
+                );
+            }
+        }
+    }
+
+    #[test]
+    fn error_toolong() {
+        assert_eq!(
+            Err(PathComponentError::TooLong),
+            validate_name("X".repeat(500).into_bytes().as_slice())
+        )
+    }
+
+    #[test]
+    fn success() {
+        let exp = PathComponent { inner: "aa".into() };
+
+        // Bytes
+        {
+            let v: Bytes = "aa".into();
+            assert_eq!(
+                Ok(exp.clone()),
+                PathComponent::try_from(v),
+                "conversion must succeed"
+            );
+        }
+
+        // &[u8]
+        {
+            let v: &[u8] = b"aa";
+            assert_eq!(
+                Ok(exp.clone()),
+                PathComponent::try_from(v),
+                "conversion must succeed"
+            );
+        }
+
+        // &str
+        {
+            let v: &str = "aa";
+            assert_eq!(
+                Ok(exp.clone()),
+                PathComponent::try_from(v),
+                "conversion must succeed"
+            );
+        }
+
+        // &CStr
+        {
+            let v = CString::new("aa").expect("CString must construct");
+            let v = v.as_c_str();
+            assert_eq!(
+                Ok(exp.clone()),
+                PathComponent::try_from(v),
+                "conversion must succeed"
+            );
+        }
+    }
+}
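And the same pattern for PathComponent, a sketch assuming the path module is public as shown in path/mod.rs below:

use tvix_castore::path::{PathComponent, PathComponentError};

fn main() {
    let c = PathComponent::try_from("bin").expect("valid component");
    assert_eq!(b"bin", c.as_ref());

    // Slashes, '.', '..', null bytes, empty and overlong names are rejected.
    assert!(matches!(
        PathComponent::try_from("a/b"),
        Err(PathComponentError::Convert(_, e)) if *e == PathComponentError::Slashes
    ));
}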
diff --git a/tvix/castore/src/path.rs b/tvix/castore/src/path/mod.rs
index fcc2bd01fbd6..15f31a570da9 100644
--- a/tvix/castore/src/path.rs
+++ b/tvix/castore/src/path/mod.rs
@@ -1,5 +1,5 @@
 //! Contains data structures to deal with Paths in the tvix-castore model.
-
+use bstr::ByteSlice;
 use std::{
     borrow::Borrow,
     fmt::{self, Debug, Display},
@@ -8,9 +8,8 @@ use std::{
     str::FromStr,
 };
 
-use bstr::ByteSlice;
-
-use crate::proto::validate_node_name;
+mod component;
+pub use component::{PathComponent, PathComponentError};
 
 /// Represents a Path in the castore model.
 /// These are always relative, and platform-independent, which distinguishes
@@ -38,7 +37,9 @@ impl Path {
         if !bytes.is_empty() {
             // Ensure all components are valid castore node names.
             for component in bytes.split_str(b"/") {
-                validate_node_name(component).ok()?;
+                if component::validate_name(component).is_err() {
+                    return None;
+                }
             }
         }
 
@@ -81,10 +82,26 @@ impl Path {
         Ok(v)
     }
 
+    /// Provides an iterator over the components of the path,
+    /// which are individual [PathComponent]s.
+    /// In case the path is empty, an empty iterator is returned.
+    pub fn components(&self) -> impl Iterator<Item = PathComponent> + '_ {
+        let mut iter = self.inner.split_str(&b"/");
+
+        // We don't want to return an empty element, consume it if it's the only one.
+        if self.inner.is_empty() {
+            let _ = iter.next();
+        }
+
+        iter.map(|b| PathComponent {
+            inner: bytes::Bytes::copy_from_slice(b),
+        })
+    }
+
     /// Produces an iterator over the components of the path, which are
     /// individual byte slices.
     /// In case the path is empty, an empty iterator is returned.
-    pub fn components(&self) -> impl Iterator<Item = &[u8]> {
+    pub fn components_bytes(&self) -> impl Iterator<Item = &[u8]> {
         let mut iter = self.inner.split_str(&b"/");
 
         // We don't want to return an empty element, consume it if it's the only one.
@@ -95,11 +112,16 @@ impl Path {
         iter
     }
 
-    /// Returns the final component of the Path, if there is one.
-    pub fn file_name(&self) -> Option<&[u8]> {
+    /// Returns the final component of the Path, if there is one.
+    pub fn file_name(&self) -> Option<PathComponent> {
         self.components().last()
     }
 
+    /// Returns the final component of the Path, if there is one, in bytes.
+    pub fn file_name_bytes(&self) -> Option<&[u8]> {
+        self.components_bytes().last()
+    }
+
     pub fn as_bytes(&self) -> &[u8] {
         &self.inner
     }
@@ -211,7 +233,9 @@ impl PathBuf {
 
     /// Adjoins `name` to self.
     pub fn try_push(&mut self, name: &[u8]) -> Result<(), std::io::Error> {
-        validate_node_name(name).map_err(|_| std::io::ErrorKind::InvalidData)?;
+        if component::validate_name(name).is_err() {
+            return Err(std::io::ErrorKind::InvalidData.into());
+        }
 
         if !self.inner.is_empty() {
             self.inner.push(b'/');
@@ -329,7 +353,7 @@ mod test {
         assert_eq!(s.as_bytes(), p.as_bytes(), "inner bytes mismatch");
         assert_eq!(
             num_components,
-            p.components().count(),
+            p.components_bytes().count(),
             "number of components mismatch"
         );
     }
@@ -396,10 +420,10 @@ mod test {
     #[case("a", vec!["a"])]
     #[case("a/b", vec!["a", "b"])]
     #[case("a/b/c", vec!["a","b", "c"])]
-    pub fn components(#[case] p: PathBuf, #[case] exp_components: Vec<&str>) {
+    pub fn components_bytes(#[case] p: PathBuf, #[case] exp_components: Vec<&str>) {
         assert_eq!(
             exp_components,
-            p.components()
+            p.components_bytes()
                 .map(|x| x.to_str().unwrap())
                 .collect::<Vec<_>>()
         );
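To make the components()/components_bytes() split concrete, a sketch assuming PathBuf dereferences to Path as the existing impls in this module suggest:

use tvix_castore::path::PathBuf;

fn main() {
    let p: PathBuf = "a/b/c".parse().expect("valid path");

    // components() yields owned, validated PathComponents ...
    let names: Vec<String> = p.components().map(|c| c.to_string()).collect();
    assert_eq!(vec!["a", "b", "c"], names);

    // ... while components_bytes() yields plain byte slices.
    assert_eq!(3, p.components_bytes().count());
    assert_eq!(Some(&b"c"[..]), p.file_name_bytes());
}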
diff --git a/tvix/castore/src/proto/grpc_directoryservice_wrapper.rs b/tvix/castore/src/proto/grpc_directoryservice_wrapper.rs
index 5c1428690cb4..62fdb34a25a0 100644
--- a/tvix/castore/src/proto/grpc_directoryservice_wrapper.rs
+++ b/tvix/castore/src/proto/grpc_directoryservice_wrapper.rs
@@ -1,6 +1,5 @@
-use crate::directoryservice::ClosureValidator;
-use crate::proto;
-use crate::{directoryservice::DirectoryService, B3Digest};
+use crate::directoryservice::{DirectoryGraph, DirectoryService, LeavesToRootValidator};
+use crate::{proto, B3Digest, DirectoryError};
 use futures::stream::BoxStream;
 use futures::TryStreamExt;
 use std::ops::Deref;
@@ -57,13 +56,16 @@ where
                                 Status::not_found(format!("directory {} not found", digest))
                             })?;
 
-                        Box::pin(once(Ok(directory)))
+                        Box::pin(once(Ok(directory.into())))
                     } else {
                         // If recursive was requested, traverse via get_recursive.
                         Box::pin(
-                            self.directory_service.get_recursive(&digest).map_err(|e| {
-                                tonic::Status::new(tonic::Code::Internal, e.to_string())
-                            }),
+                            self.directory_service
+                                .get_recursive(&digest)
+                                .map_ok(proto::Directory::from)
+                                .map_err(|e| {
+                                    tonic::Status::new(tonic::Code::Internal, e.to_string())
+                                }),
                         )
                     }
                 }))
@@ -78,14 +80,22 @@ where
     ) -> Result<Response<proto::PutDirectoryResponse>, Status> {
         let mut req_inner = request.into_inner();
 
-        // We put all Directory messages we receive into ClosureValidator first.
-        let mut validator = ClosureValidator::default();
+        // We put all Directory messages we receive into DirectoryGraph.
+        let mut validator = DirectoryGraph::<LeavesToRootValidator>::default();
         while let Some(directory) = req_inner.message().await? {
-            validator.add(directory)?;
+            validator
+                .add(directory.try_into().map_err(|e: DirectoryError| {
+                    tonic::Status::new(tonic::Code::Internal, e.to_string())
+                })?)
+                .map_err(|e| tonic::Status::new(tonic::Code::Internal, e.to_string()))?;
         }
 
         // drain, which validates connectivity too.
-        let directories = validator.finalize()?;
+        let directories = validator
+            .validate()
+            .map_err(|e| tonic::Status::new(tonic::Code::Internal, e.to_string()))?
+            .drain_leaves_to_root()
+            .collect::<Vec<_>>();
 
         let mut directory_putter = self.directory_service.put_multiple_start();
         for directory in directories {
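Condensed, the new put() validation flow looks like this; a hedged sketch against the DirectoryGraph API exactly as exercised above, with the gRPC plumbing stripped:

use tvix_castore::directoryservice::{DirectoryGraph, LeavesToRootValidator};
use tvix_castore::Directory;

fn validate_closure(dirs: Vec<Directory>) -> Result<Vec<Directory>, String> {
    let mut validator = DirectoryGraph::<LeavesToRootValidator>::default();
    for d in dirs {
        // Rejects directories that arrive before their children, among others.
        validator.add(d).map_err(|e| e.to_string())?;
    }
    // validate() checks connectivity of the whole closure; draining
    // leaves-to-root yields the order a DirectoryPutter expects.
    Ok(validator
        .validate()
        .map_err(|e| e.to_string())?
        .drain_leaves_to_root()
        .collect())
}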
diff --git a/tvix/castore/src/proto/mod.rs b/tvix/castore/src/proto/mod.rs
index 5374e3ae5a80..8bc74b412676 100644
--- a/tvix/castore/src/proto/mod.rs
+++ b/tvix/castore/src/proto/mod.rs
@@ -1,18 +1,13 @@
-#![allow(non_snake_case)]
-// https://github.com/hyperium/tonic/issues/1056
-use bstr::ByteSlice;
-use std::{collections::HashSet, iter::Peekable, str};
-
 use prost::Message;
+use std::cmp::Ordering;
 
 mod grpc_blobservice_wrapper;
 mod grpc_directoryservice_wrapper;
 
+use crate::{path::PathComponent, B3Digest, DirectoryError};
 pub use grpc_blobservice_wrapper::GRPCBlobServiceWrapper;
 pub use grpc_directoryservice_wrapper::GRPCDirectoryServiceWrapper;
 
-use crate::{B3Digest, B3_LEN};
-
 tonic::include_proto!("tvix.castore.v1");
 
 #[cfg(feature = "tonic-reflection")]
@@ -24,38 +19,6 @@ pub const FILE_DESCRIPTOR_SET: &[u8] = tonic::include_file_descriptor_set!("tvix
 #[cfg(test)]
 mod tests;
 
-/// Errors that can occur during the validation of [Directory] messages.
-#[derive(Debug, PartialEq, Eq, thiserror::Error)]
-pub enum ValidateDirectoryError {
-    /// Elements are not in sorted order
-    #[error("{:?} is not sorted", .0.as_bstr())]
-    WrongSorting(Vec<u8>),
-    /// Multiple elements with the same name encountered
-    #[error("{:?} is a duplicate name", .0.as_bstr())]
-    DuplicateName(Vec<u8>),
-    /// Invalid node
-    #[error("invalid node with name {:?}: {:?}", .0.as_bstr(), .1.to_string())]
-    InvalidNode(Vec<u8>, ValidateNodeError),
-    #[error("Total size exceeds u32::MAX")]
-    SizeOverflow,
-}
-
-/// Errors that occur during Node validation
-#[derive(Debug, PartialEq, Eq, thiserror::Error)]
-pub enum ValidateNodeError {
-    #[error("No node set")]
-    NoNodeSet,
-    /// Invalid digest length encountered
-    #[error("Invalid Digest length: {0}")]
-    InvalidDigestLen(usize),
-    /// Invalid name encountered
-    #[error("Invalid name: {}", .0.as_bstr())]
-    InvalidName(Vec<u8>),
-    /// Invalid symlink target
-    #[error("Invalid symlink target: {}", .0.as_bstr())]
-    InvalidSymlinkTarget(Vec<u8>),
-}
-
 /// Errors that occur during StatBlobResponse validation
 #[derive(Debug, PartialEq, Eq, thiserror::Error)]
 pub enum ValidateStatBlobResponseError {
@@ -64,184 +27,6 @@ pub enum ValidateStatBlobResponseError {
     InvalidDigestLen(usize, usize),
 }
 
-/// Checks a Node name for validity as an intermediate node.
-/// We disallow slashes, null bytes, '.', '..' and the empty string.
-pub(crate) fn validate_node_name(name: &[u8]) -> Result<(), ValidateNodeError> {
-    if name.is_empty()
-        || name == b".."
-        || name == b"."
-        || name.contains(&0x00)
-        || name.contains(&b'/')
-    {
-        Err(ValidateNodeError::InvalidName(name.to_owned()))
-    } else {
-        Ok(())
-    }
-}
-
-/// NamedNode is implemented for [FileNode], [DirectoryNode] and [SymlinkNode]
-/// and [node::Node], so we can ask all of them for the name easily.
-pub trait NamedNode {
-    fn get_name(&self) -> &[u8];
-}
-
-impl NamedNode for &FileNode {
-    fn get_name(&self) -> &[u8] {
-        &self.name
-    }
-}
-
-impl NamedNode for &DirectoryNode {
-    fn get_name(&self) -> &[u8] {
-        &self.name
-    }
-}
-
-impl NamedNode for &SymlinkNode {
-    fn get_name(&self) -> &[u8] {
-        &self.name
-    }
-}
-
-impl NamedNode for node::Node {
-    fn get_name(&self) -> &[u8] {
-        match self {
-            node::Node::File(node_file) => &node_file.name,
-            node::Node::Directory(node_directory) => &node_directory.name,
-            node::Node::Symlink(node_symlink) => &node_symlink.name,
-        }
-    }
-}
-
-impl Node {
-    /// Ensures the node has a valid enum kind (is Some), and passes its
-    // per-enum validation.
-    pub fn validate(&self) -> Result<(), ValidateNodeError> {
-        if let Some(node) = self.node.as_ref() {
-            node.validate()
-        } else {
-            Err(ValidateNodeError::NoNodeSet)
-        }
-    }
-}
-
-impl node::Node {
-    /// Returns the node with a new name.
-    pub fn rename(self, name: bytes::Bytes) -> Self {
-        match self {
-            node::Node::Directory(n) => node::Node::Directory(DirectoryNode { name, ..n }),
-            node::Node::File(n) => node::Node::File(FileNode { name, ..n }),
-            node::Node::Symlink(n) => node::Node::Symlink(SymlinkNode { name, ..n }),
-        }
-    }
-
-    /// Ensures the node has a valid name, and checks the type-specific fields too.
-    pub fn validate(&self) -> Result<(), ValidateNodeError> {
-        match self {
-            // for a directory root node, ensure the digest has the appropriate size.
-            node::Node::Directory(directory_node) => {
-                if directory_node.digest.len() != B3_LEN {
-                    Err(ValidateNodeError::InvalidDigestLen(
-                        directory_node.digest.len(),
-                    ))?;
-                }
-                validate_node_name(&directory_node.name)
-            }
-            // for a file root node, ensure the digest has the appropriate size.
-            node::Node::File(file_node) => {
-                if file_node.digest.len() != B3_LEN {
-                    Err(ValidateNodeError::InvalidDigestLen(file_node.digest.len()))?;
-                }
-                validate_node_name(&file_node.name)
-            }
-            // ensure the symlink target is not empty and doesn't contain null bytes.
-            node::Node::Symlink(symlink_node) => {
-                if symlink_node.target.is_empty() || symlink_node.target.contains(&b'\0') {
-                    Err(ValidateNodeError::InvalidSymlinkTarget(
-                        symlink_node.target.to_vec(),
-                    ))?;
-                }
-                validate_node_name(&symlink_node.name)
-            }
-        }
-    }
-}
-
-impl PartialOrd for node::Node {
-    fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
-        Some(self.cmp(other))
-    }
-}
-
-impl Ord for node::Node {
-    fn cmp(&self, other: &Self) -> std::cmp::Ordering {
-        self.get_name().cmp(other.get_name())
-    }
-}
-
-impl PartialOrd for FileNode {
-    fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
-        Some(self.cmp(other))
-    }
-}
-
-impl Ord for FileNode {
-    fn cmp(&self, other: &Self) -> std::cmp::Ordering {
-        self.get_name().cmp(other.get_name())
-    }
-}
-
-impl PartialOrd for SymlinkNode {
-    fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
-        Some(self.cmp(other))
-    }
-}
-
-impl Ord for SymlinkNode {
-    fn cmp(&self, other: &Self) -> std::cmp::Ordering {
-        self.get_name().cmp(other.get_name())
-    }
-}
-
-impl PartialOrd for DirectoryNode {
-    fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
-        Some(self.cmp(other))
-    }
-}
-
-impl Ord for DirectoryNode {
-    fn cmp(&self, other: &Self) -> std::cmp::Ordering {
-        self.get_name().cmp(other.get_name())
-    }
-}
-
-/// Accepts a name, and a mutable reference to the previous name.
-/// If the passed name is larger than the previous one, the reference is updated.
-/// If it's not, an error is returned.
-fn update_if_lt_prev<'n>(
-    prev_name: &mut &'n [u8],
-    name: &'n [u8],
-) -> Result<(), ValidateDirectoryError> {
-    if *name < **prev_name {
-        return Err(ValidateDirectoryError::WrongSorting(name.to_vec()));
-    }
-    *prev_name = name;
-    Ok(())
-}
-
-/// Inserts the given name into a HashSet if it's not already in there.
-/// If it is, an error is returned.
-fn insert_once<'n>(
-    seen_names: &mut HashSet<&'n [u8]>,
-    name: &'n [u8],
-) -> Result<(), ValidateDirectoryError> {
-    if seen_names.get(name).is_some() {
-        return Err(ValidateDirectoryError::DuplicateName(name.to_vec()));
-    }
-    seen_names.insert(name);
-    Ok(())
-}
-
 fn checked_sum(iter: impl IntoIterator<Item = u64>) -> Option<u64> {
     iter.into_iter().try_fold(0u64, |acc, i| acc.checked_add(i))
 }
@@ -278,117 +63,211 @@ impl Directory {
             .as_bytes()
             .into()
     }
+}
 
-    /// validate checks the directory for invalid data, such as:
-    /// - violations of name restrictions
-    /// - invalid digest lengths
-    /// - not properly sorted lists
-    /// - duplicate names in the three lists
-    pub fn validate(&self) -> Result<(), ValidateDirectoryError> {
-        let mut seen_names: HashSet<&[u8]> = HashSet::new();
-
-        let mut last_directory_name: &[u8] = b"";
-        let mut last_file_name: &[u8] = b"";
-        let mut last_symlink_name: &[u8] = b"";
+impl TryFrom<Directory> for crate::Directory {
+    type Error = DirectoryError;
+
+    fn try_from(value: Directory) -> Result<Self, Self::Error> {
+        // Check directories, files and symlinks are sorted
+        // We'll notice duplicates across all three fields when constructing the Directory.
+        // FUTUREWORK: use is_sorted() once stable, and/or implement the producer for
+        // [crate::Directory::try_from_iter] iterating over all three and doing all checks inline.
+        value
+            .directories
+            .iter()
+            .try_fold(&b""[..], |prev_name, e| {
+                match e.name.as_ref().cmp(prev_name) {
+                    Ordering::Less => Err(DirectoryError::WrongSorting(e.name.to_owned())),
+                    Ordering::Equal => Err(DirectoryError::DuplicateName(
+                        e.name
+                            .to_owned()
+                            .try_into()
+                            .map_err(DirectoryError::InvalidName)?,
+                    )),
+                    Ordering::Greater => Ok(e.name.as_ref()),
+                }
+            })?;
+        value.files.iter().try_fold(&b""[..], |prev_name, e| {
+            match e.name.as_ref().cmp(prev_name) {
+                Ordering::Less => Err(DirectoryError::WrongSorting(e.name.to_owned())),
+                Ordering::Equal => Err(DirectoryError::DuplicateName(
+                    e.name
+                        .to_owned()
+                        .try_into()
+                        .map_err(DirectoryError::InvalidName)?,
+                )),
+                Ordering::Greater => Ok(e.name.as_ref()),
+            }
+        })?;
+        value.symlinks.iter().try_fold(&b""[..], |prev_name, e| {
+            match e.name.as_ref().cmp(prev_name) {
+                Ordering::Less => Err(DirectoryError::WrongSorting(e.name.to_owned())),
+                Ordering::Equal => Err(DirectoryError::DuplicateName(
+                    e.name
+                        .to_owned()
+                        .try_into()
+                        .map_err(DirectoryError::InvalidName)?,
+                )),
+                Ordering::Greater => Ok(e.name.as_ref()),
+            }
+        })?;
 
-        // check directories
-        for directory_node in &self.directories {
-            node::Node::Directory(directory_node.clone())
-                .validate()
-                .map_err(|e| {
-                    ValidateDirectoryError::InvalidNode(directory_node.name.to_vec(), e)
-                })?;
+        // FUTUREWORK: use is_sorted() once stable, and/or implement the producer for
+        // [crate::Directory::try_from_iter] iterating over all three and doing all checks inline.
+        let mut elems: Vec<(PathComponent, crate::Node)> =
+            Vec::with_capacity(value.directories.len() + value.files.len() + value.symlinks.len());
 
-            update_if_lt_prev(&mut last_directory_name, &directory_node.name)?;
-            insert_once(&mut seen_names, &directory_node.name)?;
+        for e in value.directories {
+            elems.push(
+                Node {
+                    node: Some(node::Node::Directory(e)),
+                }
+                .into_name_and_node()?,
+            );
         }
 
-        // check files
-        for file_node in &self.files {
-            node::Node::File(file_node.clone())
-                .validate()
-                .map_err(|e| ValidateDirectoryError::InvalidNode(file_node.name.to_vec(), e))?;
-
-            update_if_lt_prev(&mut last_file_name, &file_node.name)?;
-            insert_once(&mut seen_names, &file_node.name)?;
+        for e in value.files {
+            elems.push(
+                Node {
+                    node: Some(node::Node::File(e)),
+                }
+                .into_name_and_node()?,
+            )
         }
 
-        // check symlinks
-        for symlink_node in &self.symlinks {
-            node::Node::Symlink(symlink_node.clone())
-                .validate()
-                .map_err(|e| ValidateDirectoryError::InvalidNode(symlink_node.name.to_vec(), e))?;
-
-            update_if_lt_prev(&mut last_symlink_name, &symlink_node.name)?;
-            insert_once(&mut seen_names, &symlink_node.name)?;
+        for e in value.symlinks {
+            elems.push(
+                Node {
+                    node: Some(node::Node::Symlink(e)),
+                }
+                .into_name_and_node()?,
+            )
         }
 
-        self.size_checked()
-            .ok_or(ValidateDirectoryError::SizeOverflow)?;
-
-        Ok(())
+        crate::Directory::try_from_iter(elems)
     }
+}
 
-    /// Allows iterating over all three nodes ([DirectoryNode], [FileNode],
-    /// [SymlinkNode]) in an ordered fashion, as long as the individual lists
-    /// are sorted (which can be checked by the [Directory::validate]).
-    pub fn nodes(&self) -> DirectoryNodesIterator {
-        return DirectoryNodesIterator {
-            i_directories: self.directories.iter().peekable(),
-            i_files: self.files.iter().peekable(),
-            i_symlinks: self.symlinks.iter().peekable(),
-        };
-    }
+impl From<crate::Directory> for Directory {
+    fn from(value: crate::Directory) -> Self {
+        let mut directories = vec![];
+        let mut files = vec![];
+        let mut symlinks = vec![];
+
+        for (name, node) in value.into_nodes() {
+            match node {
+                crate::Node::File {
+                    digest,
+                    size,
+                    executable,
+                } => files.push(FileNode {
+                    name: name.into(),
+                    digest: digest.into(),
+                    size,
+                    executable,
+                }),
+                crate::Node::Directory { digest, size } => directories.push(DirectoryNode {
+                    name: name.into(),
+                    digest: digest.into(),
+                    size,
+                }),
+                crate::Node::Symlink { target } => {
+                    symlinks.push(SymlinkNode {
+                        name: name.into(),
+                        target: target.into(),
+                    });
+                }
+            }
+        }
 
-    /// Adds the specified [node::Node] to the [Directory], preserving sorted entries.
-    /// This assumes the [Directory] to be sorted prior to adding the node.
-    ///
-    /// Inserting an element that already exists with the same name in the directory is not
-    /// supported.
-    pub fn add(&mut self, node: node::Node) {
-        debug_assert!(
-            !self.files.iter().any(|x| x.get_name() == node.get_name()),
-            "name already exists in files"
-        );
-        debug_assert!(
-            !self
-                .directories
-                .iter()
-                .any(|x| x.get_name() == node.get_name()),
-            "name already exists in directories"
-        );
-        debug_assert!(
-            !self
-                .symlinks
-                .iter()
-                .any(|x| x.get_name() == node.get_name()),
-            "name already exists in symlinks"
-        );
+        Directory {
+            directories,
+            files,
+            symlinks,
+        }
+    }
+}
 
-        match node {
-            node::Node::File(node) => {
-                let pos = self
-                    .files
-                    .binary_search(&node)
-                    .expect_err("Tvix bug: dir entry with name already exists");
-                self.files.insert(pos, node);
+impl Node {
+    /// Converts a proto [Node] to a [crate::Node], and splits off the name.
+    pub fn into_name_and_node(self) -> Result<(PathComponent, crate::Node), DirectoryError> {
+        match self.node.ok_or(DirectoryError::NoNodeSet)? {
+            node::Node::Directory(n) => {
+                let name: PathComponent = n.name.try_into().map_err(DirectoryError::InvalidName)?;
+                let digest = B3Digest::try_from(n.digest)
+                    .map_err(|e| DirectoryError::InvalidNode(name.clone(), e.into()))?;
+
+                let node = crate::Node::Directory {
+                    digest,
+                    size: n.size,
+                };
+
+                Ok((name, node))
             }
-            node::Node::Directory(node) => {
-                let pos = self
-                    .directories
-                    .binary_search(&node)
-                    .expect_err("Tvix bug: dir entry with name already exists");
-                self.directories.insert(pos, node);
+            node::Node::File(n) => {
+                let name: PathComponent = n.name.try_into().map_err(DirectoryError::InvalidName)?;
+                let digest = B3Digest::try_from(n.digest)
+                    .map_err(|e| DirectoryError::InvalidNode(name.clone(), e.into()))?;
+
+                let node = crate::Node::File {
+                    digest,
+                    size: n.size,
+                    executable: n.executable,
+                };
+
+                Ok((name, node))
             }
-            node::Node::Symlink(node) => {
-                let pos = self
-                    .symlinks
-                    .binary_search(&node)
-                    .expect_err("Tvix bug: dir entry with name already exists");
-                self.symlinks.insert(pos, node);
+
+            node::Node::Symlink(n) => {
+                let name: PathComponent = n.name.try_into().map_err(DirectoryError::InvalidName)?;
+
+                let node = crate::Node::Symlink {
+                    target: n.target.try_into().map_err(|e| {
+                        DirectoryError::InvalidNode(
+                            name.clone(),
+                            crate::ValidateNodeError::InvalidSymlinkTarget(e),
+                        )
+                    })?,
+                };
+
+                Ok((name, node))
             }
         }
     }
+
+    /// Constructs a [Node] from a name and a [crate::Node].
+    /// The name is a [bytes::Bytes], not a [PathComponent], as we need to
+    /// use an empty name in some places.
+    pub fn from_name_and_node(name: bytes::Bytes, n: crate::Node) -> Self {
+        match n {
+            crate::Node::Directory { digest, size } => Self {
+                node: Some(node::Node::Directory(DirectoryNode {
+                    name,
+                    digest: digest.into(),
+                    size,
+                })),
+            },
+            crate::Node::File {
+                digest,
+                size,
+                executable,
+            } => Self {
+                node: Some(node::Node::File(FileNode {
+                    name,
+                    digest: digest.into(),
+                    size,
+                    executable,
+                })),
+            },
+            crate::Node::Symlink { target } => Self {
+                node: Some(node::Node::Symlink(SymlinkNode {
+                    name,
+                    target: target.into(),
+                })),
+            },
+        }
+    }
 }
 
 impl StatBlobResponse {
@@ -407,65 +286,3 @@ impl StatBlobResponse {
         Ok(())
     }
 }
-
-/// Struct to hold the state of an iterator over all nodes of a Directory.
-///
-/// Internally, this keeps peekable Iterators over all three lists of a
-/// Directory message.
-pub struct DirectoryNodesIterator<'a> {
-    // directory: &Directory,
-    i_directories: Peekable<std::slice::Iter<'a, DirectoryNode>>,
-    i_files: Peekable<std::slice::Iter<'a, FileNode>>,
-    i_symlinks: Peekable<std::slice::Iter<'a, SymlinkNode>>,
-}
-
-/// looks at two elements implementing NamedNode, and returns true if "left
-/// is smaller / comes first".
-///
-/// Some(_) is preferred over None.
-fn left_name_lt_right<A: NamedNode, B: NamedNode>(left: Option<&A>, right: Option<&B>) -> bool {
-    match left {
-        // if left is None, right always wins
-        None => false,
-        Some(left_inner) => {
-            // left is Some.
-            match right {
-                // left is Some, right is None - left wins.
-                None => true,
-                Some(right_inner) => {
-                    // both are Some - compare the name.
-                    return left_inner.get_name() < right_inner.get_name();
-                }
-            }
-        }
-    }
-}
-
-impl Iterator for DirectoryNodesIterator<'_> {
-    type Item = node::Node;
-
-    // next returns the next node in the Directory.
-    // we peek at all three internal iterators, and pick the one with the
-    // smallest name, to ensure lexicographical ordering.
-    // The individual lists are already known to be sorted.
-    fn next(&mut self) -> Option<Self::Item> {
-        if left_name_lt_right(self.i_directories.peek(), self.i_files.peek()) {
-            // i_directories is still in the game, compare with symlinks
-            if left_name_lt_right(self.i_directories.peek(), self.i_symlinks.peek()) {
-                self.i_directories
-                    .next()
-                    .cloned()
-                    .map(node::Node::Directory)
-            } else {
-                self.i_symlinks.next().cloned().map(node::Node::Symlink)
-            }
-        } else {
-            // i_files is still in the game, compare with symlinks
-            if left_name_lt_right(self.i_files.peek(), self.i_symlinks.peek()) {
-                self.i_files.next().cloned().map(node::Node::File)
-            } else {
-                self.i_symlinks.next().cloned().map(node::Node::Symlink)
-            }
-        }
-    }
-}
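The net effect of this file's changes is that validation now happens at the proto boundary; a sketch of the round trip:

use tvix_castore::{proto, Directory, DirectoryError};

fn roundtrip(wire: proto::Directory) -> Result<proto::Directory, DirectoryError> {
    // TryFrom validates sorting, duplicate names, digest lengths, symlink
    // targets, ... where Directory::validate() used to be called manually.
    let dir = Directory::try_from(wire)?;
    // The reverse direction is infallible: a validated Directory re-serializes
    // with its nodes partitioned back into the three sorted lists.
    Ok(proto::Directory::from(dir))
}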
diff --git a/tvix/castore/src/proto/tests/directory.rs b/tvix/castore/src/proto/tests/directory.rs
index 81b73a048d52..efbc4e9f2af1 100644
--- a/tvix/castore/src/proto/tests/directory.rs
+++ b/tvix/castore/src/proto/tests/directory.rs
@@ -1,7 +1,5 @@
-use crate::proto::{
-    node, Directory, DirectoryNode, FileNode, SymlinkNode, ValidateDirectoryError,
-    ValidateNodeError,
-};
+use crate::proto::{Directory, DirectoryError, DirectoryNode, FileNode, SymlinkNode};
+use crate::ValidateNodeError;
 
 use hex_literal::hex;
 
@@ -149,7 +147,7 @@ fn digest() {
 #[test]
 fn validate_empty() {
     let d = Directory::default();
-    assert_eq!(d.validate(), Ok(()));
+    assert!(crate::Directory::try_from(d).is_ok());
 }
 
 #[test]
@@ -157,18 +155,15 @@ fn validate_invalid_names() {
     {
         let d = Directory {
             directories: vec![DirectoryNode {
-                name: "".into(),
+                name: b"\0"[..].into(),
                 digest: DUMMY_DIGEST.to_vec().into(),
                 size: 42,
             }],
             ..Default::default()
         };
-        match d.validate().expect_err("must fail") {
-            ValidateDirectoryError::InvalidNode(n, ValidateNodeError::InvalidName(_)) => {
-                assert_eq!(n, b"")
-            }
-            _ => panic!("unexpected error"),
-        };
+
+        let e = crate::Directory::try_from(d).expect_err("must fail");
+        assert!(matches!(e, DirectoryError::InvalidName(_)));
     }
 
     {
@@ -180,12 +175,8 @@ fn validate_invalid_names() {
             }],
             ..Default::default()
         };
-        match d.validate().expect_err("must fail") {
-            ValidateDirectoryError::InvalidNode(n, ValidateNodeError::InvalidName(_)) => {
-                assert_eq!(n, b".")
-            }
-            _ => panic!("unexpected error"),
-        };
+        let e = crate::Directory::try_from(d).expect_err("must fail");
+        assert!(matches!(e, DirectoryError::InvalidName(_)));
     }
 
     {
@@ -198,12 +189,8 @@ fn validate_invalid_names() {
             }],
             ..Default::default()
         };
-        match d.validate().expect_err("must fail") {
-            ValidateDirectoryError::InvalidNode(n, ValidateNodeError::InvalidName(_)) => {
-                assert_eq!(n, b"..")
-            }
-            _ => panic!("unexpected error"),
-        };
+        let e = crate::Directory::try_from(d).expect_err("must fail");
+        assert!(matches!(e, DirectoryError::InvalidName(_)));
     }
 
     {
@@ -214,12 +201,8 @@ fn validate_invalid_names() {
             }],
             ..Default::default()
         };
-        match d.validate().expect_err("must fail") {
-            ValidateDirectoryError::InvalidNode(n, ValidateNodeError::InvalidName(_)) => {
-                assert_eq!(n, b"\x00")
-            }
-            _ => panic!("unexpected error"),
-        };
+        let e = crate::Directory::try_from(d).expect_err("must fail");
+        assert!(matches!(e, DirectoryError::InvalidName(_)));
     }
 
     {
@@ -230,12 +213,20 @@ fn validate_invalid_names() {
             }],
             ..Default::default()
         };
-        match d.validate().expect_err("must fail") {
-            ValidateDirectoryError::InvalidNode(n, ValidateNodeError::InvalidName(_)) => {
-                assert_eq!(n, b"foo/bar")
-            }
-            _ => panic!("unexpected error"),
+        let e = crate::Directory::try_from(d).expect_err("must fail");
+        assert!(matches!(e, DirectoryError::InvalidName(_)));
+    }
+
+    {
+        let d = Directory {
+            symlinks: vec![SymlinkNode {
+                name: bytes::Bytes::copy_from_slice("X".repeat(500).into_bytes().as_slice()),
+                target: "foo".into(),
+            }],
+            ..Default::default()
         };
+        let e = crate::Directory::try_from(d).expect_err("must fail");
+        assert!(matches!(e, DirectoryError::InvalidName(_)));
     }
 }
 
@@ -249,8 +240,8 @@ fn validate_invalid_digest() {
         }],
         ..Default::default()
     };
-    match d.validate().expect_err("must fail") {
-        ValidateDirectoryError::InvalidNode(_, ValidateNodeError::InvalidDigestLen(n)) => {
+    match crate::Directory::try_from(d).expect_err("must fail") {
+        DirectoryError::InvalidNode(_, ValidateNodeError::InvalidDigestLen(n)) => {
             assert_eq!(n, 2)
         }
         _ => panic!("unexpected error"),
@@ -276,15 +267,15 @@ fn validate_sorting() {
             ],
             ..Default::default()
         };
-        match d.validate().expect_err("must fail") {
-            ValidateDirectoryError::WrongSorting(s) => {
-                assert_eq!(s, b"a");
+        match crate::Directory::try_from(d).expect_err("must fail") {
+            DirectoryError::WrongSorting(s) => {
+                assert_eq!(s.as_ref(), b"a");
             }
             _ => panic!("unexpected error"),
         }
     }
 
-    // "a" exists twice, bad.
+    // "a" exists twice (same types), bad.
     {
         let d = Directory {
             directories: vec![
@@ -301,9 +292,31 @@ fn validate_sorting() {
             ],
             ..Default::default()
         };
-        match d.validate().expect_err("must fail") {
-            ValidateDirectoryError::DuplicateName(s) => {
-                assert_eq!(s, b"a");
+        match crate::Directory::try_from(d).expect_err("must fail") {
+            DirectoryError::DuplicateName(s) => {
+                assert_eq!(s.as_ref(), b"a");
+            }
+            _ => panic!("unexpected error"),
+        }
+    }
+
+    // "a" exists twice (different types), bad.
+    {
+        let d = Directory {
+            directories: vec![DirectoryNode {
+                name: "a".into(),
+                digest: DUMMY_DIGEST.to_vec().into(),
+                size: 42,
+            }],
+            symlinks: vec![SymlinkNode {
+                name: "a".into(),
+                target: "b".into(),
+            }],
+            ..Default::default()
+        };
+        match crate::Directory::try_from(d).expect_err("must fail") {
+            DirectoryError::DuplicateName(s) => {
+                assert_eq!(s.as_ref(), b"a");
             }
             _ => panic!("unexpected error"),
         }
@@ -327,7 +340,7 @@ fn validate_sorting() {
             ..Default::default()
         };
 
-        d.validate().expect("validate shouldn't error");
+        crate::Directory::try_from(d).expect("validate shouldn't error");
     }
 
     // [b, c] and [a] are both properly sorted.
@@ -352,101 +365,6 @@ fn validate_sorting() {
             ..Default::default()
         };
 
-        d.validate().expect("validate shouldn't error");
+        crate::Directory::try_from(d).expect("validate shouldn't error");
     }
 }
-
-#[test]
-fn validate_overflow() {
-    let d = Directory {
-        directories: vec![DirectoryNode {
-            name: "foo".into(),
-            digest: DUMMY_DIGEST.to_vec().into(),
-            size: u64::MAX,
-        }],
-        ..Default::default()
-    };
-
-    match d.validate().expect_err("must fail") {
-        ValidateDirectoryError::SizeOverflow => {}
-        _ => panic!("unexpected error"),
-    }
-}
-
-#[test]
-fn add_nodes_to_directory() {
-    let mut d = Directory {
-        ..Default::default()
-    };
-
-    d.add(node::Node::Directory(DirectoryNode {
-        name: "b".into(),
-        digest: DUMMY_DIGEST.to_vec().into(),
-        size: 1,
-    }));
-    d.add(node::Node::Directory(DirectoryNode {
-        name: "a".into(),
-        digest: DUMMY_DIGEST.to_vec().into(),
-        size: 1,
-    }));
-    d.add(node::Node::Directory(DirectoryNode {
-        name: "z".into(),
-        digest: DUMMY_DIGEST.to_vec().into(),
-        size: 1,
-    }));
-
-    d.add(node::Node::File(FileNode {
-        name: "f".into(),
-        digest: DUMMY_DIGEST.to_vec().into(),
-        size: 1,
-        executable: true,
-    }));
-    d.add(node::Node::File(FileNode {
-        name: "c".into(),
-        digest: DUMMY_DIGEST.to_vec().into(),
-        size: 1,
-        executable: true,
-    }));
-    d.add(node::Node::File(FileNode {
-        name: "g".into(),
-        digest: DUMMY_DIGEST.to_vec().into(),
-        size: 1,
-        executable: true,
-    }));
-
-    d.add(node::Node::Symlink(SymlinkNode {
-        name: "t".into(),
-        target: "a".into(),
-    }));
-    d.add(node::Node::Symlink(SymlinkNode {
-        name: "o".into(),
-        target: "a".into(),
-    }));
-    d.add(node::Node::Symlink(SymlinkNode {
-        name: "e".into(),
-        target: "a".into(),
-    }));
-
-    d.validate().expect("directory should be valid");
-}
-
-#[test]
-#[cfg_attr(not(debug_assertions), ignore)]
-#[should_panic = "name already exists in directories"]
-fn add_duplicate_node_to_directory_panics() {
-    let mut d = Directory {
-        ..Default::default()
-    };
-
-    d.add(node::Node::Directory(DirectoryNode {
-        name: "a".into(),
-        digest: DUMMY_DIGEST.to_vec().into(),
-        size: 1,
-    }));
-    d.add(node::Node::File(FileNode {
-        name: "a".into(),
-        digest: DUMMY_DIGEST.to_vec().into(),
-        size: 1,
-        executable: true,
-    }));
-}
diff --git a/tvix/castore/src/proto/tests/directory_nodes_iterator.rs b/tvix/castore/src/proto/tests/directory_nodes_iterator.rs
deleted file mode 100644
index 68f147a33210..000000000000
--- a/tvix/castore/src/proto/tests/directory_nodes_iterator.rs
+++ /dev/null
@@ -1,78 +0,0 @@
-use crate::proto::Directory;
-use crate::proto::DirectoryNode;
-use crate::proto::FileNode;
-use crate::proto::NamedNode;
-use crate::proto::SymlinkNode;
-
-#[test]
-fn iterator() {
-    let d = Directory {
-        directories: vec![
-            DirectoryNode {
-                name: "c".into(),
-                ..DirectoryNode::default()
-            },
-            DirectoryNode {
-                name: "d".into(),
-                ..DirectoryNode::default()
-            },
-            DirectoryNode {
-                name: "h".into(),
-                ..DirectoryNode::default()
-            },
-            DirectoryNode {
-                name: "l".into(),
-                ..DirectoryNode::default()
-            },
-        ],
-        files: vec![
-            FileNode {
-                name: "b".into(),
-                ..FileNode::default()
-            },
-            FileNode {
-                name: "e".into(),
-                ..FileNode::default()
-            },
-            FileNode {
-                name: "g".into(),
-                ..FileNode::default()
-            },
-            FileNode {
-                name: "j".into(),
-                ..FileNode::default()
-            },
-        ],
-        symlinks: vec![
-            SymlinkNode {
-                name: "a".into(),
-                ..SymlinkNode::default()
-            },
-            SymlinkNode {
-                name: "f".into(),
-                ..SymlinkNode::default()
-            },
-            SymlinkNode {
-                name: "i".into(),
-                ..SymlinkNode::default()
-            },
-            SymlinkNode {
-                name: "k".into(),
-                ..SymlinkNode::default()
-            },
-        ],
-    };
-
-    // We keep this strings here and convert to string to make the comparison
-    // less messy.
-    let mut node_names: Vec<String> = vec![];
-
-    for node in d.nodes() {
-        node_names.push(String::from_utf8(node.get_name().to_vec()).unwrap());
-    }
-
-    assert_eq!(
-        vec!["a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k", "l"],
-        node_names
-    );
-}
diff --git a/tvix/castore/src/proto/tests/mod.rs b/tvix/castore/src/proto/tests/mod.rs
index 8d903bacb6c5..74334029e84c 100644
--- a/tvix/castore/src/proto/tests/mod.rs
+++ b/tvix/castore/src/proto/tests/mod.rs
@@ -1,2 +1 @@
 mod directory;
-mod directory_nodes_iterator;
diff --git a/tvix/castore/src/tests/import.rs b/tvix/castore/src/tests/import.rs
index 8b3bd5ce0ffc..32c2c363689f 100644
--- a/tvix/castore/src/tests/import.rs
+++ b/tvix/castore/src/tests/import.rs
@@ -2,15 +2,11 @@ use crate::blobservice::{self, BlobService};
 use crate::directoryservice;
 use crate::fixtures::*;
 use crate::import::fs::ingest_path;
-use crate::proto;
+use crate::Node;
 
-use std::sync::Arc;
 use tempfile::TempDir;
 
 #[cfg(target_family = "unix")]
-use std::os::unix::ffi::OsStrExt;
-
-#[cfg(target_family = "unix")]
 #[tokio::test]
 async fn symlink() {
     let blob_service = blobservice::from_addr("memory://").await.unwrap();
@@ -26,7 +22,7 @@ async fn symlink() {
     .unwrap();
 
     let root_node = ingest_path(
-        Arc::from(blob_service),
+        blob_service,
         directory_service,
         tmpdir.path().join("doesntmatter"),
     )
@@ -34,18 +30,16 @@ async fn symlink() {
     .expect("must succeed");
 
     assert_eq!(
-        proto::node::Node::Symlink(proto::SymlinkNode {
-            name: "doesntmatter".into(),
-            target: "/nix/store/somewhereelse".into(),
-        }),
+        Node::Symlink {
+            target: "/nix/store/somewhereelse".try_into().unwrap()
+        },
         root_node,
     )
 }
 
 #[tokio::test]
 async fn single_file() {
-    let blob_service =
-        Arc::from(blobservice::from_addr("memory://").await.unwrap()) as Arc<dyn BlobService>;
+    let blob_service = blobservice::from_addr("memory://").await.unwrap();
     let directory_service = directoryservice::from_addr("memory://").await.unwrap();
 
     let tmpdir = TempDir::new().unwrap();
@@ -61,12 +55,11 @@ async fn single_file() {
     .expect("must succeed");
 
     assert_eq!(
-        proto::node::Node::File(proto::FileNode {
-            name: "root".into(),
-            digest: HELLOWORLD_BLOB_DIGEST.clone().into(),
+        Node::File {
+            digest: HELLOWORLD_BLOB_DIGEST.clone(),
             size: HELLOWORLD_BLOB_CONTENTS.len() as u64,
             executable: false,
-        }),
+        },
         root_node,
     );
 
@@ -77,8 +70,7 @@ async fn single_file() {
 #[cfg(target_family = "unix")]
 #[tokio::test]
 async fn complicated() {
-    let blob_service =
-        Arc::from(blobservice::from_addr("memory://").await.unwrap()) as Arc<dyn BlobService>;
+    let blob_service = blobservice::from_addr("memory://").await.unwrap();
     let directory_service = directoryservice::from_addr("memory://").await.unwrap();
 
     let tmpdir = TempDir::new().unwrap();
@@ -98,17 +90,10 @@ async fn complicated() {
 
     // ensure root_node matched expectations
     assert_eq!(
-        proto::node::Node::Directory(proto::DirectoryNode {
-            name: tmpdir
-                .path()
-                .file_name()
-                .unwrap()
-                .as_bytes()
-                .to_owned()
-                .into(),
-            digest: DIRECTORY_COMPLICATED.digest().into(),
+        Node::Directory {
+            digest: DIRECTORY_COMPLICATED.digest().clone(),
             size: DIRECTORY_COMPLICATED.size(),
-        }),
+        },
         root_node,
     );
 
diff --git a/tvix/castore/src/tonic.rs b/tvix/castore/src/tonic.rs
index 4b65d6b028ef..e63e1ad7aab8 100644
--- a/tvix/castore/src/tonic.rs
+++ b/tvix/castore/src/tonic.rs
@@ -1,3 +1,4 @@
+use hyper_util::rt::TokioIo;
 use tokio::net::UnixStream;
 use tonic::transport::{Channel, Endpoint};
 
@@ -25,7 +26,10 @@ pub async fn channel_from_url(url: &url::Url) -> Result<Channel, self::Error> {
 
             let connector = tower::service_fn({
                 let url = url.clone();
-                move |_: tonic::transport::Uri| UnixStream::connect(url.path().to_string().clone())
+                move |_: tonic::transport::Uri| {
+                    let unix = UnixStream::connect(url.path().to_string().clone());
+                    async move { Ok::<_, std::io::Error>(TokioIo::new(unix.await?)) }
+                }
             });
 
             // the URL doesn't matter
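
The `TokioIo` wrapper added above is needed because hyper 1.x (which tonic now builds on) defines its own I/O traits rather than using tokio's `AsyncRead`/`AsyncWrite`. A minimal sketch of the same adaptation in isolation, assuming only the `hyper-util` and `tokio` crates:

```rust
// Hedged sketch, not part of the diff: adapting a tokio UnixStream for hyper 1.x.
use hyper_util::rt::TokioIo;
use tokio::net::UnixStream;

async fn connect(path: &str) -> std::io::Result<TokioIo<UnixStream>> {
    // TokioIo bridges tokio's AsyncRead/AsyncWrite to hyper's Read/Write traits.
    Ok(TokioIo::new(UnixStream::connect(path).await?))
}
```
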
diff --git a/tvix/cli/Cargo.toml b/tvix/cli/Cargo.toml
index 1fa23518228a..27cd5a6b395c 100644
--- a/tvix/cli/Cargo.toml
+++ b/tvix/cli/Cargo.toml
@@ -14,14 +14,25 @@ tvix-castore = { path = "../castore" }
 tvix-store = { path = "../store", default-features = false, features = []}
 tvix-eval = { path = "../eval" }
 tvix-glue = { path = "../glue" }
-bytes = "1.4.0"
-clap = { version = "4.0", features = ["derive", "env"] }
-dirs = "4.0.0"
-rustyline = "10.0.0"
-thiserror = "1.0.38"
-tokio = "1.28.0"
-tracing = { version = "0.1.37", features = ["max_level_trace", "release_max_level_info"] }
-tracing-subscriber = "0.3.16"
+tvix-tracing = { path = "../tracing" }
+bytes = { workspace = true }
+clap = { workspace = true, features = ["derive", "env"] }
+dirs = { workspace = true }
+rustyline = { workspace = true }
+rnix = { workspace = true }
+rowan = { workspace = true }
+smol_str = { workspace = true }
+thiserror = { workspace = true }
+tokio = { workspace = true }
+tracing = { workspace = true }
+tracing-indicatif = { workspace = true }
+rustc-hash = { workspace = true }
+mimalloc = { workspace = true }
+wu-manber = { workspace = true }
 
-[dependencies.wu-manber]
-git = "https://github.com/tvlfyi/wu-manber.git"
+[dev-dependencies]
+expect-test = { workspace = true }
+
+[features]
+default = []
+tracy = ["tvix-tracing/tracy"]
diff --git a/tvix/cli/default.nix b/tvix/cli/default.nix
index 62e93cc21333..175e088c2dea 100644
--- a/tvix/cli/default.nix
+++ b/tvix/cli/default.nix
@@ -2,6 +2,9 @@
 
 (depot.tvix.crates.workspaceMembers.tvix-cli.build.override {
   runTests = true;
+  testPreRun = ''
+    export SSL_CERT_FILE=/dev/null
+  '';
 }).overrideAttrs (finalAttrs: previousAttrs:
 
 let
@@ -27,7 +30,7 @@ let
   mkExprBenchmark = { expr, description }:
     let name = "tvix-cli-benchmark-${description}"; in
     (pkgs.runCommand name { } ''
-      export SSL_CERT_FILE=${pkgs.cacert.out}/etc/ssl/certs/ca-bundle.crt
+      export SSL_CERT_FILE=/dev/null
       ${lib.escapeShellArgs [
         "${pkgs.time}/bin/time"
         "--format" "${benchmark-gnutime-format-string description}"
@@ -46,24 +49,29 @@ let
 
   # Constructs a Derivation invoking tvix-cli inside a build, ensures the
   # calculated tvix output path matches what's passed in externally.
-  mkNixpkgsEvalTest = attrpath: expectedPath:
-    let
-      name = "tvix-eval-test-${builtins.replaceStrings [".drv"] ["-drv"] attrpath}";
-    in
-    (pkgs.runCommand name { } ''
-      export SSL_CERT_FILE=${pkgs.cacert.out}/etc/ssl/certs/ca-bundle.crt
-      TVIX_OUTPUT=$(${tvix-cli}/bin/tvix -E '(import ${pkgs.path} {}).${attrpath}')
-      EXPECTED='${/* the verbatim expected Tvix output: */ "=> \"${builtins.unsafeDiscardStringContext expectedPath}\" :: string"}'
+  mkNixpkgsEvalTest =
+    { attrPath ? null # An attribute that must already be accessible from `pkgs`. Should evaluate to a store path.
+    , expr ? null # A Nix expression that should evaluate to a store path.
+    , expectedPath # The expected store path that should match one of the above.
+    }:
+      assert lib.assertMsg (attrPath != null || expr != null) "Either 'attrPath' or 'expr' must be set.";
+      let
+        name = "tvix-eval-test-${builtins.replaceStrings [".drv"] ["-drv"] (if expr != null then "custom-expr" else attrPath)}";
+      in
+      (pkgs.runCommand name { } ''
+        export SSL_CERT_FILE=/dev/null
+        TVIX_OUTPUT=$(${tvix-cli}/bin/tvix --no-warnings -E '${if expr != null then expr else "(import ${pkgs.path} {}).${attrPath}"}')
+        EXPECTED='${/* the verbatim expected Tvix output: */ "=> \"${builtins.unsafeDiscardStringContext expectedPath}\" :: string"}'
 
-      echo "Tvix output: ''${TVIX_OUTPUT}"
-      if [ "$TVIX_OUTPUT" != "$EXPECTED" ]; then
-        echo "Correct would have been ''${EXPECTED}"
-        exit 1
-      fi
+        echo "Tvix output: ''${TVIX_OUTPUT}"
+        if [ "$TVIX_OUTPUT" != "$EXPECTED" ]; then
+          echo "Correct would have been ''${EXPECTED}"
+          exit 1
+        fi
 
-      echo "Output was correct."
-      touch $out
-    '');
+        echo "Output was correct."
+        touch $out
+      '');
 
 
   benchmarks = {
@@ -76,13 +84,24 @@ let
   };
 
   evalTests = {
-    eval-nixpkgs-stdenv-drvpath = (mkNixpkgsEvalTest "stdenv.drvPath" pkgs.stdenv.drvPath);
-    eval-nixpkgs-stdenv-outpath = (mkNixpkgsEvalTest "stdenv.outPath" pkgs.stdenv.outPath);
-    eval-nixpkgs-hello-outpath = (mkNixpkgsEvalTest "hello.outPath" pkgs.hello.outPath);
-    eval-nixpkgs-firefox-outpath = (mkNixpkgsEvalTest "firefox.outPath" pkgs.firefox.outPath);
-    eval-nixpkgs-firefox-drvpath = (mkNixpkgsEvalTest "firefox.drvPath" pkgs.firefox.drvPath);
-    eval-nixpkgs-cross-stdenv-outpath = (mkNixpkgsEvalTest "pkgsCross.aarch64-multiplatform.stdenv.outPath" pkgs.pkgsCross.aarch64-multiplatform.stdenv.outPath);
-    eval-nixpkgs-cross-hello-outpath = (mkNixpkgsEvalTest "pkgsCross.aarch64-multiplatform.hello.outPath" pkgs.pkgsCross.aarch64-multiplatform.hello.outPath);
+    eval-nixpkgs-stdenv-drvpath = (mkNixpkgsEvalTest { attrPath = "stdenv.drvPath"; expectedPath = pkgs.stdenv.drvPath; });
+    eval-nixpkgs-stdenv-outpath = (mkNixpkgsEvalTest { attrPath = "stdenv.outPath"; expectedPath = pkgs.stdenv.outPath; });
+    eval-nixpkgs-hello-outpath = (mkNixpkgsEvalTest { attrPath = "hello.outPath"; expectedPath = pkgs.hello.outPath; });
+    eval-nixpkgs-firefox-outpath = (mkNixpkgsEvalTest { attrPath = "firefox.outPath"; expectedPath = pkgs.firefox.outPath; });
+    eval-nixpkgs-firefox-drvpath = (mkNixpkgsEvalTest { attrPath = "firefox.drvPath"; expectedPath = pkgs.firefox.drvPath; });
+    eval-nixpkgs-cross-stdenv-outpath = (mkNixpkgsEvalTest { attrPath = "pkgsCross.aarch64-multiplatform.stdenv.outPath"; expectedPath = pkgs.pkgsCross.aarch64-multiplatform.stdenv.outPath; });
+    eval-nixpkgs-cross-hello-outpath = (mkNixpkgsEvalTest { attrPath = "pkgsCross.aarch64-multiplatform.hello.outPath"; expectedPath = pkgs.pkgsCross.aarch64-multiplatform.hello.outPath; });
+    # Our CI runner currently uses a Nix version lower than 2.12, which means it uses the old JSON library.
+    # The NixOS docs generate a JSON file with all the NixOS options, so the output differs between Tvix (and Nix 2.12+) and our CI runner's Nix version.
+    # We disable the NixOS docs generation for now. TODO(kranzes): Re-enable NixOS docs once the CI runner is using a newer Nix version.
+    eval-nixpkgs-nixos-gnome-installer-drvpath = (mkNixpkgsEvalTest {
+      expr = "(import ${pkgs.path}/nixos/release.nix { configuration = { documentation.nixos.enable = (import ${pkgs.path}/lib).mkForce false; }; }).iso_gnome.${pkgs.system}.drvPath";
+      expectedPath = (import "${pkgs.path}/nixos/release.nix" { configuration.documentation.nixos.enable = lib.mkForce false; }).iso_gnome.${pkgs.system}.drvPath;
+    });
+    eval-nixpkgs-nixos-gnome-installer-outpath = (mkNixpkgsEvalTest {
+      expr = "(import ${pkgs.path}/nixos/release.nix { configuration = { documentation.nixos.enable = (import ${pkgs.path}/lib).mkForce false; }; }).iso_gnome.${pkgs.system}.outPath";
+      expectedPath = (import "${pkgs.path}/nixos/release.nix" { configuration.documentation.nixos.enable = lib.mkForce false; }).iso_gnome.${pkgs.system}.outPath;
+    });
   };
 in
 {
@@ -91,5 +110,5 @@ in
   };
 
   # Expose benchmarks and evalTests as standard CI targets.
-  passthru = benchmarks // evalTests;
+  passthru = previousAttrs.passthru // benchmarks // evalTests;
 })
diff --git a/tvix/cli/src/args.rs b/tvix/cli/src/args.rs
new file mode 100644
index 000000000000..36f9a6a262dd
--- /dev/null
+++ b/tvix/cli/src/args.rs
@@ -0,0 +1,86 @@
+use std::path::PathBuf;
+
+use clap::Parser;
+use tracing::Level;
+use tvix_store::utils::ServiceUrlsMemory;
+
+/// Provides a CLI interface to trigger evaluation using tvix-eval.
+///
+/// Uses configured tvix-[ca]store and tvix-build components,
+/// and by default a set of builtins similar to those present in Nix.
+///
+/// None of the available stores write to the local `/nix/store` location.
+///
+/// The CLI interface is not stable and subject to change.
+#[derive(Parser, Clone)]
+pub struct Args {
+    /// A global log level to use when printing logs.
+    /// It's also possible to set `RUST_LOG` according to
+    /// `tracing_subscriber::filter::EnvFilter`, which will always have
+    /// priority.
+    #[arg(long, default_value_t=Level::INFO)]
+    pub log_level: Level,
+
+    /// Path to a script to evaluate
+    pub script: Option<PathBuf>,
+
+    #[clap(long, short = 'E')]
+    pub expr: Option<String>,
+
+    /// Dump the raw AST to stdout before interpreting
+    #[clap(long, env = "TVIX_DISPLAY_AST")]
+    pub display_ast: bool,
+
+    /// Dump the bytecode to stdout before evaluating
+    #[clap(long, env = "TVIX_DUMP_BYTECODE")]
+    pub dump_bytecode: bool,
+
+    /// Trace the runtime of the VM
+    #[clap(long, env = "TVIX_TRACE_RUNTIME")]
+    pub trace_runtime: bool,
+
+    /// Capture the time (relative to the start time of evaluation) of all events traced with
+    /// `--trace-runtime`
+    #[clap(long, env = "TVIX_TRACE_RUNTIME_TIMING", requires("trace_runtime"))]
+    pub trace_runtime_timing: bool,
+
+    /// Only compile, but do not execute code. This will make Tvix act
+    /// sort of like a linter.
+    #[clap(long)]
+    pub compile_only: bool,
+
+    /// Don't print warnings.
+    #[clap(long)]
+    pub no_warnings: bool,
+
+    /// A colon-separated list of directories to use to resolve `<...>`-style paths
+    #[clap(long, short = 'I', env = "NIX_PATH")]
+    pub nix_search_path: Option<String>,
+
+    /// Print "raw" (unquoted) output.
+    #[clap(long)]
+    pub raw: bool,
+
+    /// Strictly evaluate values, traversing them and forcing e.g.
+    /// elements of lists and attribute sets before printing the
+    /// return value.
+    #[clap(long)]
+    pub strict: bool,
+
+    #[clap(flatten)]
+    pub service_addrs: ServiceUrlsMemory,
+
+    #[arg(long, env, default_value = "dummy://")]
+    pub build_service_addr: String,
+
+    /// An optional path to which Derivations encountered during evaluation
+    /// are dumped after evaluation. If it doesn't exist, the directory is created.
+    ///
+    /// Files dumped there are named like they would show up in `/nix/store`,
+    /// if produced by Nix. Existing files are not overwritten.
+    ///
+    /// This is only for debugging and diffing purposes during post-eval inspection;
+    /// Tvix does not read from these.
+    #[clap(long)]
+    pub drv_dumpdir: Option<PathBuf>,
+}
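
Since `Args` derives `clap::Parser`, it can also be constructed from an explicit argv, which the REPL integration tests later in this diff rely on. A small usage sketch (the argv values here are made up for illustration):

```rust
use clap::Parser;
use tvix_cli::Args;

fn main() {
    // Parse from an explicit argv; real invocations use Args::parse().
    let args = Args::parse_from(["tvix", "--no-warnings", "-E", "1 + 1"]);
    assert_eq!(args.expr.as_deref(), Some("1 + 1"));
    assert!(args.no_warnings);
}
```
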
diff --git a/tvix/cli/src/assignment.rs b/tvix/cli/src/assignment.rs
new file mode 100644
index 000000000000..6fd9725d2956
--- /dev/null
+++ b/tvix/cli/src/assignment.rs
@@ -0,0 +1,74 @@
+use rnix::{Root, SyntaxKind, SyntaxNode};
+use rowan::ast::AstNode;
+
+/// An assignment of an identifier to a value in the context of a REPL.
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub(crate) struct Assignment<'a> {
+    pub(crate) ident: &'a str,
+    pub(crate) value: rnix::ast::Expr,
+}
+
+impl<'a> Assignment<'a> {
+    /// Try to parse an [`Assignment`] from the given input string.
+    ///
+    /// Returns [`None`] if the parsing fails for any reason, since the intent
+    /// is for us to fall back to trying to parse the input as a regular
+    /// expression or other REPL command.
+    pub fn parse(input: &'a str) -> Option<Self> {
+        let mut tt = rnix::tokenizer::Tokenizer::new(input);
+        macro_rules! next {
+            ($kind:ident) => {{
+                loop {
+                    let (kind, tok) = tt.next()?;
+                    if kind == SyntaxKind::TOKEN_WHITESPACE {
+                        continue;
+                    }
+                    if kind != SyntaxKind::$kind {
+                        return None;
+                    }
+                    break tok;
+                }
+            }};
+        }
+
+        let ident = next!(TOKEN_IDENT);
+        let _equal = next!(TOKEN_ASSIGN);
+        let (green, errs) = rnix::parser::parse(tt);
+        let value = Root::cast(SyntaxNode::new_root(green))?.expr()?;
+
+        if !errs.is_empty() {
+            return None;
+        }
+
+        Some(Self { ident, value })
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn simple_assignments() {
+        for input in ["x = 4", "x     =       \t\t\n\t4", "x=4"] {
+            let res = Assignment::parse(input).unwrap();
+            assert_eq!(res.ident, "x");
+            assert_eq!(res.value.to_string(), "4");
+        }
+    }
+
+    #[test]
+    fn complex_exprs() {
+        let input = "x = { y = 4; z = let q = 7; in [ q (y // { z = 9; }) ]; }";
+        let res = Assignment::parse(input).unwrap();
+        assert_eq!(res.ident, "x");
+    }
+
+    #[test]
+    fn not_an_assignment() {
+        let input = "{ x = 4; }";
+        let res = Assignment::parse(input);
+        assert!(res.is_none(), "{input:?}");
+    }
+}
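
An illustrative sketch of the fall-back contract that `parse` documents (illustrative only, since `Assignment` is crate-private and the helper name is made up):

```rust
// Inside tvix-cli: route input depending on whether it parses as an assignment.
fn classify(input: &str) -> &'static str {
    match Assignment::parse(input) {
        Some(_) => "assignment", // e.g. "x = 4"
        None => "expression",    // e.g. "{ x = 4; }" falls through to the evaluator
    }
}
```
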
diff --git a/tvix/cli/src/lib.rs b/tvix/cli/src/lib.rs
new file mode 100644
index 000000000000..beb4c505207c
--- /dev/null
+++ b/tvix/cli/src/lib.rs
@@ -0,0 +1,270 @@
+use std::path::PathBuf;
+use std::rc::Rc;
+
+use rustc_hash::FxHashMap;
+use smol_str::SmolStr;
+use std::fmt::Write;
+use tracing::{instrument, Span};
+use tracing_indicatif::span_ext::IndicatifSpanExt;
+use tvix_build::buildservice;
+use tvix_eval::{
+    builtins::impure_builtins,
+    observer::{DisassemblingObserver, TracingObserver},
+    ErrorKind, EvalIO, GlobalsMap, SourceCode, Value,
+};
+use tvix_glue::{
+    builtins::{add_derivation_builtins, add_fetcher_builtins, add_import_builtins},
+    configure_nix_path,
+    tvix_io::TvixIO,
+    tvix_store_io::TvixStoreIO,
+};
+
+pub mod args;
+pub mod assignment;
+pub mod repl;
+
+pub use args::Args;
+pub use repl::Repl;
+
+pub fn init_io_handle(tokio_runtime: &tokio::runtime::Runtime, args: &Args) -> Rc<TvixStoreIO> {
+    let (blob_service, directory_service, path_info_service, nar_calculation_service) =
+        tokio_runtime
+            .block_on(tvix_store::utils::construct_services(
+                args.service_addrs.clone(),
+            ))
+            .expect("unable to setup {blob|directory|pathinfo}service before interpreter setup");
+
+    let build_service = tokio_runtime
+        .block_on({
+            let blob_service = blob_service.clone();
+            let directory_service = directory_service.clone();
+            async move {
+                buildservice::from_addr(
+                    &args.build_service_addr,
+                    blob_service.clone(),
+                    directory_service.clone(),
+                )
+                .await
+            }
+        })
+        .expect("unable to setup buildservice before interpreter setup");
+
+    Rc::new(TvixStoreIO::new(
+        blob_service.clone(),
+        directory_service.clone(),
+        path_info_service,
+        nar_calculation_service.into(),
+        build_service.into(),
+        tokio_runtime.handle().clone(),
+    ))
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
+pub enum AllowIncomplete {
+    Allow,
+    #[default]
+    RequireComplete,
+}
+
+impl AllowIncomplete {
+    fn allow(&self) -> bool {
+        matches!(self, Self::Allow)
+    }
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub struct IncompleteInput;
+
+pub struct EvalResult {
+    value: Option<Value>,
+    globals: Rc<GlobalsMap>,
+}
+
+/// Interprets the given code snippet, printing out warnings and errors and returning the result.
+#[allow(clippy::too_many_arguments)]
+pub fn evaluate(
+    tvix_store_io: Rc<TvixStoreIO>,
+    code: &str,
+    path: Option<PathBuf>,
+    args: &Args,
+    allow_incomplete: AllowIncomplete,
+    env: Option<&FxHashMap<SmolStr, Value>>,
+    globals: Option<Rc<GlobalsMap>>,
+    source_map: Option<SourceCode>,
+) -> Result<EvalResult, IncompleteInput> {
+    let span = Span::current();
+    span.pb_start();
+    span.pb_set_style(&tvix_tracing::PB_SPINNER_STYLE);
+    span.pb_set_message("Setting up evaluator…");
+
+    let mut eval_builder = tvix_eval::Evaluation::builder(Box::new(TvixIO::new(
+        tvix_store_io.clone() as Rc<dyn EvalIO>,
+    )) as Box<dyn EvalIO>)
+    .enable_import()
+    .with_strict(args.strict)
+    .env(env);
+
+    match globals {
+        Some(globals) => {
+            eval_builder = eval_builder.with_globals(globals);
+        }
+        None => {
+            eval_builder = eval_builder.add_builtins(impure_builtins());
+            eval_builder = add_derivation_builtins(eval_builder, Rc::clone(&tvix_store_io));
+            eval_builder = add_fetcher_builtins(eval_builder, Rc::clone(&tvix_store_io));
+            eval_builder = add_import_builtins(eval_builder, Rc::clone(&tvix_store_io));
+        }
+    };
+    eval_builder = configure_nix_path(eval_builder, &args.nix_search_path);
+
+    if let Some(source_map) = source_map {
+        eval_builder = eval_builder.with_source_map(source_map);
+    }
+
+    let source_map = eval_builder.source_map().clone();
+    let (result, globals) = {
+        let mut compiler_observer =
+            DisassemblingObserver::new(source_map.clone(), std::io::stderr());
+        if args.dump_bytecode {
+            eval_builder.set_compiler_observer(Some(&mut compiler_observer));
+        }
+
+        let mut runtime_observer = TracingObserver::new(std::io::stderr());
+        if args.trace_runtime {
+            if args.trace_runtime_timing {
+                runtime_observer.enable_timing()
+            }
+            eval_builder.set_runtime_observer(Some(&mut runtime_observer));
+        }
+
+        span.pb_set_message("Evaluating…");
+
+        let eval = eval_builder.build();
+        let globals = eval.globals();
+        let result = eval.evaluate(code, path);
+        (result, globals)
+    };
+
+    if allow_incomplete.allow()
+        && result.errors.iter().any(|err| {
+            matches!(
+                &err.kind,
+                ErrorKind::ParseErrors(pes)
+                    if pes.iter().any(|pe| matches!(pe, rnix::parser::ParseError::UnexpectedEOF))
+            )
+        })
+    {
+        return Err(IncompleteInput);
+    }
+
+    if args.display_ast {
+        if let Some(ref expr) = result.expr {
+            eprintln!("AST: {}", tvix_eval::pretty_print_expr(expr));
+        }
+    }
+
+    for error in &result.errors {
+        error.fancy_format_stderr();
+    }
+
+    if !args.no_warnings {
+        for warning in &result.warnings {
+            warning.fancy_format_stderr(&source_map);
+        }
+    }
+
+    if let Some(dumpdir) = &args.drv_dumpdir {
+        // Dump all known derivations files to `dumpdir`.
+        std::fs::create_dir_all(dumpdir).expect("failed to create drv dumpdir");
+        tvix_store_io
+            .known_paths
+            .borrow()
+            .get_derivations()
+            // Skip already dumped derivations.
+            .filter(|(drv_path, _)| !dumpdir.join(drv_path.to_string()).exists())
+            .for_each(|(drv_path, drv)| {
+                std::fs::write(dumpdir.join(drv_path.to_string()), drv.to_aterm_bytes())
+                    .expect("failed to write drv to dumpdir");
+            })
+    }
+
+    Ok(EvalResult {
+        globals,
+        value: result.value,
+    })
+}
+
+pub struct InterpretResult {
+    output: String,
+    success: bool,
+    pub(crate) globals: Option<Rc<GlobalsMap>>,
+}
+
+impl InterpretResult {
+    pub fn empty_success(globals: Option<Rc<GlobalsMap>>) -> Self {
+        Self {
+            output: String::new(),
+            success: true,
+            globals,
+        }
+    }
+
+    pub fn finalize(self) -> bool {
+        print!("{}", self.output);
+        self.success
+    }
+
+    pub fn output(&self) -> &str {
+        &self.output
+    }
+
+    pub fn success(&self) -> bool {
+        self.success
+    }
+}
+
+/// Interprets the given code snippet, printing out warnings, errors
+/// and the result itself. The return value indicates whether
+/// evaluation succeeded.
+#[instrument(skip_all, fields(indicatif.pb_show=1))]
+#[allow(clippy::too_many_arguments)]
+pub fn interpret(
+    tvix_store_io: Rc<TvixStoreIO>,
+    code: &str,
+    path: Option<PathBuf>,
+    args: &Args,
+    explain: bool,
+    allow_incomplete: AllowIncomplete,
+    env: Option<&FxHashMap<SmolStr, Value>>,
+    globals: Option<Rc<GlobalsMap>>,
+    source_map: Option<SourceCode>,
+) -> Result<InterpretResult, IncompleteInput> {
+    let mut output = String::new();
+    let result = evaluate(
+        tvix_store_io,
+        code,
+        path,
+        args,
+        allow_incomplete,
+        env,
+        globals,
+        source_map,
+    )?;
+
+    if let Some(value) = result.value.as_ref() {
+        if explain {
+            writeln!(&mut output, "=> {}", value.explain()).unwrap();
+        } else if args.raw {
+            writeln!(&mut output, "{}", value.to_contextful_str().unwrap()).unwrap();
+        } else {
+            writeln!(&mut output, "=> {} :: {}", value, value.type_of()).unwrap();
+        }
+    }
+
+    // inform the caller about any errors
+    Ok(InterpretResult {
+        output,
+        success: result.value.is_some(),
+        globals: Some(result.globals),
+    })
+}
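
A hedged sketch of how a caller drives `interpret` and uses `AllowIncomplete` to detect unfinished input; this mirrors what the REPL below does, and the `feed` helper is hypothetical:

```rust
use std::rc::Rc;
use tvix_cli::{interpret, AllowIncomplete, Args, IncompleteInput};
use tvix_glue::tvix_store_io::TvixStoreIO;

// Returns false on evaluation failure; true if evaluation succeeded or the
// input was merely incomplete (the caller should then wait for more lines).
fn feed(io: Rc<TvixStoreIO>, code: &str, args: &Args) -> bool {
    match interpret(
        io, code, None, args, /* explain */ false,
        AllowIncomplete::Allow, None, None, None,
    ) {
        Ok(result) => result.finalize(), // prints buffered output
        Err(IncompleteInput) => true,    // unexpected EOF: accumulate more input
    }
}
```
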
diff --git a/tvix/cli/src/main.rs b/tvix/cli/src/main.rs
index d66d2ce4cbb6..379dd915cbfb 100644
--- a/tvix/cli/src/main.rs
+++ b/tvix/cli/src/main.rs
@@ -1,214 +1,34 @@
 use clap::Parser;
-use rustyline::{error::ReadlineError, Editor};
+use mimalloc::MiMalloc;
 use std::rc::Rc;
 use std::{fs, path::PathBuf};
-use tracing::Level;
-use tracing_subscriber::fmt::writer::MakeWriterExt;
-use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt};
-use tracing_subscriber::{EnvFilter, Layer};
-use tvix_build::buildservice;
-use tvix_eval::builtins::impure_builtins;
-use tvix_eval::observer::{DisassemblingObserver, TracingObserver};
-use tvix_eval::{EvalIO, Value};
-use tvix_glue::builtins::add_fetcher_builtins;
-use tvix_glue::builtins::add_import_builtins;
-use tvix_glue::tvix_io::TvixIO;
+use tvix_cli::args::Args;
+use tvix_cli::repl::Repl;
+use tvix_cli::{init_io_handle, interpret, AllowIncomplete};
+use tvix_eval::observer::DisassemblingObserver;
 use tvix_glue::tvix_store_io::TvixStoreIO;
-use tvix_glue::{builtins::add_derivation_builtins, configure_nix_path};
 
-#[derive(Parser)]
-struct Args {
-    #[arg(long)]
-    log_level: Option<Level>,
-
-    /// Path to a script to evaluate
-    script: Option<PathBuf>,
-
-    #[clap(long, short = 'E')]
-    expr: Option<String>,
-
-    /// Dump the raw AST to stdout before interpreting
-    #[clap(long, env = "TVIX_DISPLAY_AST")]
-    display_ast: bool,
-
-    /// Dump the bytecode to stdout before evaluating
-    #[clap(long, env = "TVIX_DUMP_BYTECODE")]
-    dump_bytecode: bool,
-
-    /// Trace the runtime of the VM
-    #[clap(long, env = "TVIX_TRACE_RUNTIME")]
-    trace_runtime: bool,
-
-    /// Capture the time (relative to the start time of evaluation) of all events traced with
-    /// `--trace-runtime`
-    #[clap(long, env = "TVIX_TRACE_RUNTIME_TIMING", requires("trace_runtime"))]
-    trace_runtime_timing: bool,
-
-    /// Only compile, but do not execute code. This will make Tvix act
-    /// sort of like a linter.
-    #[clap(long)]
-    compile_only: bool,
-
-    /// Don't print warnings.
-    #[clap(long)]
-    no_warnings: bool,
-
-    /// A colon-separated list of directories to use to resolve `<...>`-style paths
-    #[clap(long, short = 'I', env = "NIX_PATH")]
-    nix_search_path: Option<String>,
-
-    /// Print "raw" (unquoted) output.
-    #[clap(long)]
-    raw: bool,
-
-    /// Strictly evaluate values, traversing them and forcing e.g.
-    /// elements of lists and attribute sets before printing the
-    /// return value.
-    #[clap(long)]
-    strict: bool,
-
-    #[arg(long, env, default_value = "memory://")]
-    blob_service_addr: String,
-
-    #[arg(long, env, default_value = "memory://")]
-    directory_service_addr: String,
-
-    #[arg(long, env, default_value = "memory://")]
-    path_info_service_addr: String,
-
-    #[arg(long, env, default_value = "dummy://")]
-    build_service_addr: String,
-}
-
-fn init_io_handle(tokio_runtime: &tokio::runtime::Runtime, args: &Args) -> Rc<TvixStoreIO> {
-    let (blob_service, directory_service, path_info_service, nar_calculation_service) =
-        tokio_runtime
-            .block_on({
-                let blob_service_addr = args.blob_service_addr.clone();
-                let directory_service_addr = args.directory_service_addr.clone();
-                let path_info_service_addr = args.path_info_service_addr.clone();
-                async move {
-                    tvix_store::utils::construct_services(
-                        blob_service_addr,
-                        directory_service_addr,
-                        path_info_service_addr,
-                    )
-                    .await
-                }
-            })
-            .expect("unable to setup {blob|directory|pathinfo}service before interpreter setup");
-
-    let build_service = tokio_runtime
-        .block_on({
-            let blob_service = blob_service.clone();
-            let directory_service = directory_service.clone();
-            async move {
-                buildservice::from_addr(
-                    &args.build_service_addr,
-                    blob_service.clone(),
-                    directory_service.clone(),
-                )
-                .await
-            }
-        })
-        .expect("unable to setup buildservice before interpreter setup");
-
-    Rc::new(TvixStoreIO::new(
-        blob_service.clone(),
-        directory_service.clone(),
-        path_info_service.into(),
-        nar_calculation_service.into(),
-        build_service.into(),
-        tokio_runtime.handle().clone(),
-    ))
-}
-
-/// Interprets the given code snippet, printing out warnings, errors
-/// and the result itself. The return value indicates whether
-/// evaluation succeeded.
-fn interpret(
-    tvix_store_io: Rc<TvixStoreIO>,
-    code: &str,
-    path: Option<PathBuf>,
-    args: &Args,
-    explain: bool,
-) -> bool {
-    let mut eval = tvix_eval::Evaluation::new(
-        Box::new(TvixIO::new(tvix_store_io.clone() as Rc<dyn EvalIO>)) as Box<dyn EvalIO>,
-        true,
-    );
-    eval.strict = args.strict;
-    eval.builtins.extend(impure_builtins());
-    add_derivation_builtins(&mut eval, Rc::clone(&tvix_store_io));
-    add_fetcher_builtins(&mut eval, Rc::clone(&tvix_store_io));
-    add_import_builtins(&mut eval, tvix_store_io);
-    configure_nix_path(&mut eval, &args.nix_search_path);
-
-    let source_map = eval.source_map();
-    let result = {
-        let mut compiler_observer =
-            DisassemblingObserver::new(source_map.clone(), std::io::stderr());
-        if args.dump_bytecode {
-            eval.compiler_observer = Some(&mut compiler_observer);
-        }
-
-        let mut runtime_observer = TracingObserver::new(std::io::stderr());
-        if args.trace_runtime {
-            if args.trace_runtime_timing {
-                runtime_observer.enable_timing()
-            }
-            eval.runtime_observer = Some(&mut runtime_observer);
-        }
-
-        eval.evaluate(code, path)
-    };
-
-    if args.display_ast {
-        if let Some(ref expr) = result.expr {
-            eprintln!("AST: {}", tvix_eval::pretty_print_expr(expr));
-        }
-    }
-
-    for error in &result.errors {
-        error.fancy_format_stderr();
-    }
-
-    if !args.no_warnings {
-        for warning in &result.warnings {
-            warning.fancy_format_stderr(&source_map);
-        }
-    }
-
-    if let Some(value) = result.value.as_ref() {
-        if explain {
-            println!("=> {}", value.explain());
-        } else {
-            println_result(value, args.raw);
-        }
-    }
-
-    // inform the caller about any errors
-    result.errors.is_empty()
-}
+#[global_allocator]
+static GLOBAL: MiMalloc = MiMalloc;
 
 /// Interpret the given code snippet, but only run the Tvix compiler
 /// on it and return errors and warnings.
 fn lint(code: &str, path: Option<PathBuf>, args: &Args) -> bool {
-    let mut eval = tvix_eval::Evaluation::new_impure();
-    eval.strict = args.strict;
+    let mut eval_builder = tvix_eval::Evaluation::builder_impure().with_strict(args.strict);
 
-    let source_map = eval.source_map();
+    let source_map = eval_builder.source_map().clone();
 
     let mut compiler_observer = DisassemblingObserver::new(source_map.clone(), std::io::stderr());
 
     if args.dump_bytecode {
-        eval.compiler_observer = Some(&mut compiler_observer);
+        eval_builder.set_compiler_observer(Some(&mut compiler_observer));
     }
 
     if args.trace_runtime {
         eprintln!("warning: --trace-runtime has no effect with --compile-only!");
     }
 
+    let eval = eval_builder.build();
     let result = eval.compile_only(code, path);
 
     if args.display_ast {
@@ -232,24 +52,11 @@ fn lint(code: &str, path: Option<PathBuf>, args: &Args) -> bool {
 fn main() {
     let args = Args::parse();
 
-    // configure log settings
-    let level = args.log_level.unwrap_or(Level::INFO);
-
-    let subscriber = tracing_subscriber::registry().with(
-        tracing_subscriber::fmt::Layer::new()
-            .with_writer(std::io::stderr.with_max_level(level))
-            .compact()
-            .with_filter(
-                EnvFilter::builder()
-                    .with_default_directive(level.into())
-                    .from_env()
-                    .expect("invalid RUST_LOG"),
-            ),
-    );
-    subscriber
-        .try_init()
+    let _ = tvix_tracing::TracingBuilder::default()
+        .level(args.log_level)
+        .enable_progressbar()
+        .build()
         .expect("unable to set up tracing subscriber");
-
     let tokio_runtime = tokio::runtime::Runtime::new().expect("failed to setup tokio runtime");
 
     let io_handle = init_io_handle(&tokio_runtime, &args);
@@ -257,11 +64,25 @@ fn main() {
     if let Some(file) = &args.script {
         run_file(io_handle, file.clone(), &args)
     } else if let Some(expr) = &args.expr {
-        if !interpret(io_handle, expr, None, &args, false) {
+        if !interpret(
+            io_handle,
+            expr,
+            None,
+            &args,
+            false,
+            AllowIncomplete::RequireComplete,
+            None, // TODO(aspen): Pass in --arg/--argstr here
+            None,
+            None,
+        )
+        .unwrap()
+        .finalize()
+        {
             std::process::exit(1);
         }
     } else {
-        run_prompt(io_handle, &args)
+        let mut repl = Repl::new(io_handle, &args);
+        repl.run()
     }
 }
 
@@ -274,76 +95,22 @@ fn run_file(io_handle: Rc<TvixStoreIO>, mut path: PathBuf, args: &Args) {
     let success = if args.compile_only {
         lint(&contents, Some(path), args)
     } else {
-        interpret(io_handle, &contents, Some(path), args, false)
+        interpret(
+            io_handle,
+            &contents,
+            Some(path),
+            args,
+            false,
+            AllowIncomplete::RequireComplete,
+            None,
+            None,
+            None,
+        )
+        .unwrap()
+        .finalize()
     };
 
     if !success {
         std::process::exit(1);
     }
 }
-
-fn println_result(result: &Value, raw: bool) {
-    if raw {
-        println!("{}", result.to_contextful_str().unwrap())
-    } else {
-        println!("=> {} :: {}", result, result.type_of())
-    }
-}
-
-fn state_dir() -> Option<PathBuf> {
-    let mut path = dirs::data_dir();
-    if let Some(p) = path.as_mut() {
-        p.push("tvix")
-    }
-    path
-}
-
-fn run_prompt(io_handle: Rc<TvixStoreIO>, args: &Args) {
-    let mut rl = Editor::<()>::new().expect("should be able to launch rustyline");
-
-    if args.compile_only {
-        eprintln!("warning: `--compile-only` has no effect on REPL usage!");
-    }
-
-    let history_path = match state_dir() {
-        // Attempt to set up these paths, but do not hard fail if it
-        // doesn't work.
-        Some(mut path) => {
-            let _ = std::fs::create_dir_all(&path);
-            path.push("history.txt");
-            let _ = rl.load_history(&path);
-            Some(path)
-        }
-
-        None => None,
-    };
-
-    loop {
-        let readline = rl.readline("tvix-repl> ");
-        match readline {
-            Ok(line) => {
-                if line.is_empty() {
-                    continue;
-                }
-
-                rl.add_history_entry(&line);
-
-                if let Some(without_prefix) = line.strip_prefix(":d ") {
-                    interpret(Rc::clone(&io_handle), without_prefix, None, args, true);
-                } else {
-                    interpret(Rc::clone(&io_handle), &line, None, args, false);
-                }
-            }
-            Err(ReadlineError::Interrupted) | Err(ReadlineError::Eof) => break,
-
-            Err(err) => {
-                eprintln!("error: {}", err);
-                break;
-            }
-        }
-    }
-
-    if let Some(path) = history_path {
-        rl.save_history(&path).unwrap();
-    }
-}
diff --git a/tvix/cli/src/repl.rs b/tvix/cli/src/repl.rs
new file mode 100644
index 000000000000..e4b499609829
--- /dev/null
+++ b/tvix/cli/src/repl.rs
@@ -0,0 +1,274 @@
+use std::path::PathBuf;
+use std::rc::Rc;
+
+use rustc_hash::FxHashMap;
+use rustyline::{error::ReadlineError, Editor};
+use smol_str::SmolStr;
+use tvix_eval::{GlobalsMap, SourceCode, Value};
+use tvix_glue::tvix_store_io::TvixStoreIO;
+
+use crate::{
+    assignment::Assignment, evaluate, interpret, AllowIncomplete, Args, IncompleteInput,
+    InterpretResult,
+};
+
+fn state_dir() -> Option<PathBuf> {
+    let mut path = dirs::data_dir();
+    if let Some(p) = path.as_mut() {
+        p.push("tvix")
+    }
+    path
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub(crate) enum ReplCommand<'a> {
+    Expr(&'a str),
+    Assign(Assignment<'a>),
+    Explain(&'a str),
+    Print(&'a str),
+    Quit,
+    Help,
+}
+
+impl<'a> ReplCommand<'a> {
+    const HELP: &'static str = "
+Welcome to the Tvix REPL!
+
+The following commands are supported:
+
+  <expr>       Evaluate a Nix language expression and print the result, along with its inferred type
+  <x> = <expr> Bind the result of an expression to a variable
+  :d <expr>    Evaluate a Nix language expression and print a detailed description of the result
+  :p <expr>    Evaluate a Nix language expression and print the result recursively
+  :q           Exit the REPL
+  :?, :h       Display this help text
+";
+
+    pub fn parse(input: &'a str) -> Self {
+        if input.starts_with(':') {
+            if let Some(without_prefix) = input.strip_prefix(":d ") {
+                return Self::Explain(without_prefix);
+            } else if let Some(without_prefix) = input.strip_prefix(":p ") {
+                return Self::Print(without_prefix);
+            }
+
+            let input = input.trim_end();
+            match input {
+                ":q" => return Self::Quit,
+                ":h" | ":?" => return Self::Help,
+                _ => {}
+            }
+        }
+
+        if let Some(assignment) = Assignment::parse(input) {
+            return Self::Assign(assignment);
+        }
+
+        Self::Expr(input)
+    }
+}
+
+pub struct CommandResult {
+    output: String,
+    continue_: bool,
+}
+
+impl CommandResult {
+    pub fn finalize(self) -> bool {
+        print!("{}", self.output);
+        self.continue_
+    }
+
+    pub fn output(&self) -> &str {
+        &self.output
+    }
+}
+
+pub struct Repl<'a> {
+    /// In-progress multiline input, when the input so far doesn't parse as a complete expression
+    multiline_input: Option<String>,
+    rl: Editor<()>,
+    /// Local variables defined at the top-level in the repl
+    env: FxHashMap<SmolStr, Value>,
+
+    io_handle: Rc<TvixStoreIO>,
+    args: &'a Args,
+    source_map: SourceCode,
+    globals: Option<Rc<GlobalsMap>>,
+}
+
+impl<'a> Repl<'a> {
+    pub fn new(io_handle: Rc<TvixStoreIO>, args: &'a Args) -> Self {
+        let rl = Editor::<()>::new().expect("should be able to launch rustyline");
+        Self {
+            multiline_input: None,
+            rl,
+            env: FxHashMap::default(),
+            io_handle,
+            args,
+            source_map: Default::default(),
+            globals: None,
+        }
+    }
+
+    pub fn run(&mut self) {
+        if self.args.compile_only {
+            eprintln!("warning: `--compile-only` has no effect on REPL usage!");
+        }
+
+        let history_path = match state_dir() {
+            // Attempt to set up these paths, but do not hard fail if it
+            // doesn't work.
+            Some(mut path) => {
+                let _ = std::fs::create_dir_all(&path);
+                path.push("history.txt");
+                let _ = self.rl.load_history(&path);
+                Some(path)
+            }
+
+            None => None,
+        };
+
+        loop {
+            let prompt = if self.multiline_input.is_some() {
+                "         > "
+            } else {
+                "tvix-repl> "
+            };
+
+            let readline = self.rl.readline(prompt);
+            match readline {
+                Ok(line) => {
+                    if !self.send(line).finalize() {
+                        break;
+                    }
+                }
+                Err(ReadlineError::Interrupted) | Err(ReadlineError::Eof) => break,
+
+                Err(err) => {
+                    eprintln!("error: {}", err);
+                    break;
+                }
+            }
+        }
+
+        if let Some(path) = history_path {
+            self.rl.save_history(&path).unwrap();
+        }
+    }
+
+    /// Send a line of user input to the REPL. Returns a result indicating the output to show to the
+    /// user, and whether or not to continue.
+    pub fn send(&mut self, line: String) -> CommandResult {
+        if line.is_empty() {
+            return CommandResult {
+                output: String::new(),
+                continue_: true,
+            };
+        }
+
+        let input = if let Some(mi) = &mut self.multiline_input {
+            mi.push('\n');
+            mi.push_str(&line);
+            mi
+        } else {
+            &line
+        };
+
+        let res = match ReplCommand::parse(input) {
+            ReplCommand::Quit => {
+                return CommandResult {
+                    output: String::new(),
+                    continue_: false,
+                };
+            }
+            ReplCommand::Help => {
+                println!("{}", ReplCommand::HELP);
+                Ok(InterpretResult::empty_success(None))
+            }
+            ReplCommand::Expr(input) => interpret(
+                Rc::clone(&self.io_handle),
+                input,
+                None,
+                self.args,
+                false,
+                AllowIncomplete::Allow,
+                Some(&self.env),
+                self.globals.clone(),
+                Some(self.source_map.clone()),
+            ),
+            ReplCommand::Assign(Assignment { ident, value }) => {
+                match evaluate(
+                    Rc::clone(&self.io_handle),
+                    &value.to_string(), /* FIXME: don't re-parse */
+                    None,
+                    self.args,
+                    AllowIncomplete::Allow,
+                    Some(&self.env),
+                    self.globals.clone(),
+                    Some(self.source_map.clone()),
+                ) {
+                    Ok(result) => {
+                        if let Some(value) = result.value {
+                            self.env.insert(ident.into(), value);
+                        }
+                        Ok(InterpretResult::empty_success(Some(result.globals)))
+                    }
+                    Err(incomplete) => Err(incomplete),
+                }
+            }
+            ReplCommand::Explain(input) => interpret(
+                Rc::clone(&self.io_handle),
+                input,
+                None,
+                self.args,
+                true,
+                AllowIncomplete::Allow,
+                Some(&self.env),
+                self.globals.clone(),
+                Some(self.source_map.clone()),
+            ),
+            ReplCommand::Print(input) => interpret(
+                Rc::clone(&self.io_handle),
+                input,
+                None,
+                &Args {
+                    strict: true,
+                    ..(self.args.clone())
+                },
+                false,
+                AllowIncomplete::Allow,
+                Some(&self.env),
+                self.globals.clone(),
+                Some(self.source_map.clone()),
+            ),
+        };
+
+        match res {
+            Ok(InterpretResult {
+                output,
+                globals,
+                success: _,
+            }) => {
+                self.rl.add_history_entry(input);
+                self.multiline_input = None;
+                if globals.is_some() {
+                    self.globals = globals;
+                }
+                CommandResult {
+                    output,
+                    continue_: true,
+                }
+            }
+            Err(IncompleteInput) => {
+                if self.multiline_input.is_none() {
+                    self.multiline_input = Some(line);
+                }
+                CommandResult {
+                    output: String::new(),
+                    continue_: true,
+                }
+            }
+        }
+    }
+}
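
For reference, the dispatch that `ReplCommand::parse` implements; `ReplCommand` is crate-private, so this is an illustrative in-crate sketch rather than a public API:

```rust
// Inside tvix-cli; each assertion shows an input and the variant it maps to.
assert!(matches!(ReplCommand::parse(":q"), ReplCommand::Quit));
assert!(matches!(ReplCommand::parse(":h"), ReplCommand::Help));
assert!(matches!(ReplCommand::parse(":d x"), ReplCommand::Explain("x")));
assert!(matches!(ReplCommand::parse(":p x"), ReplCommand::Print("x")));
assert!(matches!(ReplCommand::parse("x = 4"), ReplCommand::Assign(_)));
assert!(matches!(ReplCommand::parse("1 + 1"), ReplCommand::Expr("1 + 1")));
```
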
diff --git a/tvix/eval/src/tests/nix_tests/notyetpassing/readDir/bar b/tvix/cli/tests/.skip-tree
index e69de29bb2d1..e69de29bb2d1 100644
--- a/tvix/eval/src/tests/nix_tests/notyetpassing/readDir/bar
+++ b/tvix/cli/tests/.skip-tree
diff --git a/tvix/cli/tests/import.nix b/tvix/cli/tests/import.nix
new file mode 100644
index 000000000000..d5d41d3917f3
--- /dev/null
+++ b/tvix/cli/tests/import.nix
@@ -0,0 +1 @@
+{}: import ./six.nix { }
diff --git a/tvix/cli/tests/repl.rs b/tvix/cli/tests/repl.rs
new file mode 100644
index 000000000000..7b9b9e34550a
--- /dev/null
+++ b/tvix/cli/tests/repl.rs
@@ -0,0 +1,98 @@
+use std::ffi::OsString;
+
+use clap::Parser;
+use expect_test::expect;
+use tvix_cli::init_io_handle;
+
+macro_rules! test_repl {
+    ($name:ident() {$($send:expr => $expect:expr;)*}) => {
+        #[test]
+        fn $name() {
+            let tokio_runtime = tokio::runtime::Runtime::new().unwrap();
+            let args = tvix_cli::Args::parse_from(vec![
+              OsString::from("tvix"),
+              OsString::from("--nix-search-path"),
+              OsString::from("nixpkgs=/tmp"),
+            ]);
+            let mut repl = tvix_cli::Repl::new(init_io_handle(&tokio_runtime, &args), &args);
+            $({
+                let result = repl.send($send.into());
+                $expect.assert_eq(result.output())
+                ;
+            })*
+        }
+    }
+}
+
+test_repl!(simple_expr_eval() {
+    "1" => expect![[r#"
+        => 1 :: int
+    "#]];
+});
+
+test_repl!(multiline_input() {
+    "{ x = 1; " => expect![[""]];
+    "y = 2; }" => expect![[r#"
+        => { x = 1; y = 2; } :: set
+    "#]];
+});
+
+test_repl!(bind_literal() {
+    "x = 1" => expect![[""]];
+    "x" => expect![[r#"
+        => 1 :: int
+    "#]];
+});
+
+test_repl!(bind_lazy() {
+    "x = { z = 1; }" => expect![[""]];
+    "x" => expect![[r#"
+        => { z = 1; } :: set
+    "#]];
+    "x.z" => expect![[r#"
+        => 1 :: int
+    "#]];
+    "x.z" => expect![[r#"
+        => 1 :: int
+    "#]];
+});
+
+test_repl!(bind_lazy_errors() {
+    r#"x = (_: "x" + 1)"# => expect![[""]];
+    "x null" => expect![[""]];
+});
+
+test_repl!(bind_referencing_import() {
+    "six = import ./tests/six.nix {}" => expect![[""]];
+    "six.six" => expect![[r#"
+        => 6 :: int
+    "#]];
+    "imported = import ./tests/import.nix"  => expect![[""]];
+    "(imported {}).six" => expect![[r#"
+        => 6 :: int
+    "#]];
+});
+
+test_repl!(deep_print() {
+    "builtins.map (x: x + 1) [ 1 2 3 ]" => expect![[r#"
+        => [ <CODE> <CODE> <CODE> ] :: list
+    "#]];
+    ":p builtins.map (x: x + 1) [ 1 2 3 ]" => expect![[r#"
+        => [ 2 3 4 ] :: list
+    "#]];
+});
+
+test_repl!(explain() {
+    ":d { x = 1; y = [ 2 3 4 ]; }" => expect![[r#"
+        => a 2-item attribute set
+    "#]];
+});
+
+test_repl!(reference_nix_path() {
+    "<nixpkgs>" => expect![[r#"
+        => /tmp :: path
+    "#]];
+    "<nixpkgs>" => expect![[r#"
+        => /tmp :: path
+    "#]];
+});
diff --git a/tvix/cli/tests/six.nix b/tvix/cli/tests/six.nix
new file mode 100644
index 000000000000..71ec46c407ba
--- /dev/null
+++ b/tvix/cli/tests/six.nix
@@ -0,0 +1 @@
+{}: { six = builtins.foldl' (x: y: x + y) 0 [ 1 2 3 ]; }
diff --git a/tvix/clippy.toml b/tvix/clippy.toml
index be7709684c6f..31952cc80ad1 100644
--- a/tvix/clippy.toml
+++ b/tvix/clippy.toml
@@ -1,6 +1,8 @@
 # prevents a false-positive lint on our types containing bytes::Bytes
 # https://rust-lang.github.io/rust-clippy/master/index.html#/mutable_key_type
 ignore-interior-mutability = [
+  # make sure to specify the originating type name, not re-exports!
   "bytes::Bytes",
-  "tvix_castore::digests::B3Digest"
+  "tvix_castore::digests::B3Digest",
+  "tvix_castore::path::component::PathComponent"
 ]
diff --git a/tvix/crate-hashes.json b/tvix/crate-hashes.json
index 2c1e740cb9b1..42b46c2515d2 100644
--- a/tvix/crate-hashes.json
+++ b/tvix/crate-hashes.json
@@ -1,4 +1,4 @@
 {
-  "git+https://github.com/flokli/bigtable_rs?rev=0af404741dfc40eb9fa99cf4d4140a09c5c20df7#0.2.9": "1njjam1lx2xlnm7a41lga8601vmjgqz0fvc77x24gd04pc7avxll",
+  "git+https://github.com/liufuyang/bigtable_rs?rev=1818355a5373a5bc2c84287e3a4e3807154ac8ef#0.2.10": "0mn6iw1z7gdxbarsqiwscbdr25nplwlvzs0rs51vgnnjfsnbgl6q",
   "git+https://github.com/tvlfyi/wu-manber.git#wu-manber@0.1.0": "1zhk83lbq99xzyjwphv2qrb8f8qgfqwa5bbbvyzm0z0bljsjv0pd"
 }
\ No newline at end of file
diff --git a/tvix/default.nix b/tvix/default.nix
index a3a4d35df60a..8e09c5b88346 100644
--- a/tvix/default.nix
+++ b/tvix/default.nix
@@ -2,79 +2,9 @@
 { pkgs, lib, depot, ... }:
 
 let
-  # crate override for crates that need protobuf
-  protobufDep = prev: (prev.nativeBuildInputs or [ ]) ++ [ pkgs.buildPackages.protobuf ];
-  iconvDarwinDep = lib.optional pkgs.stdenv.isDarwin pkgs.libiconv;
-
-  # On Darwin, some crates producing binaries need to be able to link against security.
-  darwinDeps = lib.optionals pkgs.stdenv.isDarwin (with pkgs.buildPackages.darwin.apple_sdk.frameworks; [
-    Security
-    SystemConfiguration
-  ]);
-
   # Load the crate2nix crate tree.
-  crates = import ./Cargo.nix {
-    inherit pkgs;
-    nixpkgs = pkgs.path;
-
-    # Hack to fix Darwin build
-    # See https://github.com/NixOS/nixpkgs/issues/218712
-    buildRustCrateForPkgs = pkgs:
-      if pkgs.stdenv.isDarwin then
-        let
-          buildRustCrate = pkgs.buildRustCrate;
-          buildRustCrate_ = args: buildRustCrate args // { dontStrip = true; };
-          override = o: args: buildRustCrate.override o (args // { dontStrip = true; });
-        in
-        pkgs.makeOverridable override { }
-      else pkgs.buildRustCrate;
-
-    defaultCrateOverrides = pkgs.defaultCrateOverrides // {
-      zstd-sys = prev: {
-        nativeBuildInputs = prev.nativeBuildInputs or [ ];
-        buildInputs = prev.buildInputs or [ ] ++ iconvDarwinDep;
-      };
-
-      opentelemetry-proto = prev: {
-        nativeBuildInputs = protobufDep prev;
-      };
-
-      prost-build = prev: {
-        nativeBuildInputs = protobufDep prev;
-      };
-
-      prost-wkt-types = prev: {
-        nativeBuildInputs = protobufDep prev;
-      };
-
-      tonic-reflection = prev: {
-        nativeBuildInputs = protobufDep prev;
-      };
-
-      tvix-build = prev: {
-        PROTO_ROOT = depot.tvix.build.protos.protos;
-        nativeBuildInputs = protobufDep prev;
-        buildInputs = darwinDeps;
-      };
-
-      tvix-castore = prev: {
-        PROTO_ROOT = depot.tvix.castore.protos.protos;
-        nativeBuildInputs = protobufDep prev;
-      };
-
-      tvix-cli = prev: {
-        buildInputs = prev.buildInputs or [ ] ++ darwinDeps;
-      };
-
-      tvix-store = prev: {
-        PROTO_ROOT = depot.tvix.store.protos.protos;
-        nativeBuildInputs = protobufDep prev;
-        # fuse-backend-rs uses DiskArbitration framework to handle mount/unmount on Darwin
-        buildInputs = prev.buildInputs or [ ]
-          ++ darwinDeps
-          ++ lib.optional pkgs.stdenv.isDarwin pkgs.buildPackages.darwin.apple_sdk.frameworks.DiskArbitration;
-      };
-    };
+  crates = pkgs.callPackage ./Cargo.nix {
+    defaultCrateOverrides = depot.tvix.utils.defaultCrateOverridesForPkgs pkgs;
   };
 
   # Cargo dependencies to be used with nixpkgs rustPlatform functions.
@@ -85,8 +15,8 @@ let
     # and the outputHash as value.
     outputHashes = builtins.listToAttrs
       (map
-        (crateName:
-          (lib.nameValuePair "${crateName}-${crates.internal.crates.${crateName}.version}" crates.internal.crates.${crateName}.src.outputHash)
+        (k:
+          (lib.nameValuePair "${crates.internal.crates.${k}.crateName}-${crates.internal.crates.${k}.version}" crates.internal.crates.${k}.src.outputHash)
         ) [
         "bigtable_rs"
         "wu-manber"
@@ -111,57 +41,6 @@ in
 {
   inherit crates protos;
 
-  # Run crate2nix generate, ensure the output doesn't differ afterwards
-  # (and doesn't fail).
-  #
-  # Currently this re-downloads every crate every time
-  # crate2nix-check (but not crate2nix) is built.
-  # TODO(amjoseph): be less wasteful with bandwidth.
-  #
-  crate2nix-check =
-    let
-      outputHashAlgo = "sha256";
-    in
-    pkgs.stdenv.mkDerivation {
-      inherit src;
-
-      # Important: we include the hash of the Cargo.lock file and
-      # Cargo.nix file in the derivation name.  This forces the FOD
-      # to be rebuilt/reverified whenever either of them changes.
-      name = "tvix-crate2nix-check-" +
-        (builtins.substring 0 8 (builtins.hashFile "sha256" ./Cargo.lock)) +
-        "-" +
-        (builtins.substring 0 8 (builtins.hashFile "sha256" ./Cargo.nix));
-
-      nativeBuildInputs = with pkgs; [ git cacert cargo ];
-      buildPhase = ''
-        export CARGO_HOME=$(mktemp -d)
-
-        # The following command can be omitted, in which case
-        # crate2nix-generate will run it automatically, but won't show the
-        # output, which makes it look like the build is somehow "stuck" for a
-        # minute or two.
-        cargo metadata > /dev/null
-
-        # running this command counteracts depotfmt brokenness
-        git init
-
-        ${depot.tools.crate2nix-generate}/bin/crate2nix-generate
-
-        # technically unnecessary, but provides more-helpful output in case of error
-        diff -ur Cargo.nix ${src}/Cargo.nix
-
-        # the FOD hash will check that the (re-)generated Cargo.nix matches the committed Cargo.nix
-        cp Cargo.nix $out
-      '';
-
-      # This is an FOD in order to allow `cargo` to perform network access.
-      outputHashMode = "flat";
-      inherit outputHashAlgo;
-      outputHash = builtins.hashFile outputHashAlgo ./Cargo.nix;
-      env.SSL_CERT_FILE = "${pkgs.cacert.out}/etc/ssl/certs/ca-bundle.crt";
-    };
-
   # Provide the Tvix logo in both .webp and .png format.
   logo = pkgs.runCommand "logo"
     {
@@ -197,7 +76,7 @@ in
 
     buildInputs = [
       pkgs.fuse
-    ] ++ iconvDarwinDep;
+    ] ++ lib.optional pkgs.stdenv.isDarwin pkgs.libiconv;
 
     buildPhase = ''
       cargo doc --document-private-items
@@ -227,10 +106,22 @@ in
     buildPhase = "cargo clippy --tests --all-features --benches --examples -- -Dwarnings | tee $out";
   };
 
+  crate2nix-check =
+    let
+      crate2nix-check = depot.tvix.utils.mkCrate2nixCheck ./Cargo.nix;
+    in
+    crate2nix-check.command.overrideAttrs {
+      meta.ci.extraSteps = {
+        inherit crate2nix-check;
+      };
+    };
+
   meta.ci.targets = [
     "clippy"
-    "crate2nix-check"
     "shell"
     "rust-docs"
+    "crate2nix-check"
   ];
+
+  utils = import ./utils.nix { inherit pkgs lib depot; };
 }
diff --git a/tvix/docs/book.toml b/tvix/docs/book.toml
index 7318a9023348..56bdd860cdde 100644
--- a/tvix/docs/book.toml
+++ b/tvix/docs/book.toml
@@ -9,3 +9,18 @@ title = "Tvix Docs"
 # override the /usr/bin/plantuml default
 plantuml-cmd = "plantuml"
 use-data-uris = true
+
+[preprocessor.admonish]
+command = "mdbook-admonish"
+after = ["links"] # ensure `{{#include}}` also gets processed
+assets_version = "3.0.2" # do not edit: managed by `mdbook-admonish install`
+
+[preprocessor.d2]
+command = "d2"
+after = ["links"] # ensure `{{#include}}` also gets processed
+
+[output]
+
+[output.html]
+additional-css = ["./mdbook-admonish.css", "./mdbook-extra.css"]
+additional-js = ["./mdbook-extra.js"]
diff --git a/tvix/docs/default.nix b/tvix/docs/default.nix
index 9fc2f765767a..3b102e4b7ce2 100644
--- a/tvix/docs/default.nix
+++ b/tvix/docs/default.nix
@@ -9,7 +9,10 @@ pkgs.stdenv.mkDerivation {
   src = lib.cleanSource ./.;
 
   nativeBuildInputs = [
+    pkgs.d2
     pkgs.mdbook
+    pkgs.mdbook-admonish
+    pkgs.mdbook-d2
     pkgs.mdbook-plantuml
     pkgs.plantuml
   ];
diff --git a/tvix/docs/mdbook-admonish.css b/tvix/docs/mdbook-admonish.css
new file mode 100644
index 000000000000..45aeff051115
--- /dev/null
+++ b/tvix/docs/mdbook-admonish.css
@@ -0,0 +1,348 @@
+@charset "UTF-8";
+:is(.admonition) {
+  display: flow-root;
+  margin: 1.5625em 0;
+  padding: 0 1.2rem;
+  color: var(--fg);
+  page-break-inside: avoid;
+  background-color: var(--bg);
+  border: 0 solid black;
+  border-inline-start-width: 0.4rem;
+  border-radius: 0.2rem;
+  box-shadow: 0 0.2rem 1rem rgba(0, 0, 0, 0.05), 0 0 0.1rem rgba(0, 0, 0, 0.1);
+}
+@media print {
+  :is(.admonition) {
+    box-shadow: none;
+  }
+}
+:is(.admonition) > * {
+  box-sizing: border-box;
+}
+:is(.admonition) :is(.admonition) {
+  margin-top: 1em;
+  margin-bottom: 1em;
+}
+:is(.admonition) > .tabbed-set:only-child {
+  margin-top: 0;
+}
+html :is(.admonition) > :last-child {
+  margin-bottom: 1.2rem;
+}
+
+a.admonition-anchor-link {
+  display: none;
+  position: absolute;
+  left: -1.2rem;
+  padding-right: 1rem;
+}
+a.admonition-anchor-link:link, a.admonition-anchor-link:visited {
+  color: var(--fg);
+}
+a.admonition-anchor-link:link:hover, a.admonition-anchor-link:visited:hover {
+  text-decoration: none;
+}
+a.admonition-anchor-link::before {
+  content: "§";
+}
+
+:is(.admonition-title, summary.admonition-title) {
+  position: relative;
+  min-height: 4rem;
+  margin-block: 0;
+  margin-inline: -1.6rem -1.2rem;
+  padding-block: 0.8rem;
+  padding-inline: 4.4rem 1.2rem;
+  font-weight: 700;
+  background-color: rgba(68, 138, 255, 0.1);
+  print-color-adjust: exact;
+  -webkit-print-color-adjust: exact;
+  display: flex;
+}
+:is(.admonition-title, summary.admonition-title) p {
+  margin: 0;
+}
+html :is(.admonition-title, summary.admonition-title):last-child {
+  margin-bottom: 0;
+}
+:is(.admonition-title, summary.admonition-title)::before {
+  position: absolute;
+  top: 0.625em;
+  inset-inline-start: 1.6rem;
+  width: 2rem;
+  height: 2rem;
+  background-color: #448aff;
+  print-color-adjust: exact;
+  -webkit-print-color-adjust: exact;
+  mask-image: url('data:image/svg+xml;charset=utf-8,<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24"></svg>');
+  -webkit-mask-image: url('data:image/svg+xml;charset=utf-8,<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24"></svg>');
+  mask-repeat: no-repeat;
+  -webkit-mask-repeat: no-repeat;
+  mask-size: contain;
+  -webkit-mask-size: contain;
+  content: "";
+}
+:is(.admonition-title, summary.admonition-title):hover a.admonition-anchor-link {
+  display: initial;
+}
+
+details.admonition > summary.admonition-title::after {
+  position: absolute;
+  top: 0.625em;
+  inset-inline-end: 1.6rem;
+  height: 2rem;
+  width: 2rem;
+  background-color: currentcolor;
+  mask-image: var(--md-details-icon);
+  -webkit-mask-image: var(--md-details-icon);
+  mask-repeat: no-repeat;
+  -webkit-mask-repeat: no-repeat;
+  mask-size: contain;
+  -webkit-mask-size: contain;
+  content: "";
+  transform: rotate(0deg);
+  transition: transform 0.25s;
+}
+details[open].admonition > summary.admonition-title::after {
+  transform: rotate(90deg);
+}
+
+:root {
+  --md-details-icon: url("data:image/svg+xml;charset=utf-8,<svg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 24 24'><path d='M8.59 16.58 13.17 12 8.59 7.41 10 6l6 6-6 6-1.41-1.42Z'/></svg>");
+}
+
+:root {
+  --md-admonition-icon--admonish-note: url("data:image/svg+xml;charset=utf-8,<svg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 24 24'><path d='M20.71 7.04c.39-.39.39-1.04 0-1.41l-2.34-2.34c-.37-.39-1.02-.39-1.41 0l-1.84 1.83 3.75 3.75M3 17.25V21h3.75L17.81 9.93l-3.75-3.75L3 17.25z'/></svg>");
+  --md-admonition-icon--admonish-abstract: url("data:image/svg+xml;charset=utf-8,<svg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 24 24'><path d='M17 9H7V7h10m0 6H7v-2h10m-3 6H7v-2h7M12 3a1 1 0 0 1 1 1 1 1 0 0 1-1 1 1 1 0 0 1-1-1 1 1 0 0 1 1-1m7 0h-4.18C14.4 1.84 13.3 1 12 1c-1.3 0-2.4.84-2.82 2H5a2 2 0 0 0-2 2v14a2 2 0 0 0 2 2h14a2 2 0 0 0 2-2V5a2 2 0 0 0-2-2z'/></svg>");
+  --md-admonition-icon--admonish-info: url("data:image/svg+xml;charset=utf-8,<svg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 24 24'><path d='M13 9h-2V7h2m0 10h-2v-6h2m-1-9A10 10 0 0 0 2 12a10 10 0 0 0 10 10 10 10 0 0 0 10-10A10 10 0 0 0 12 2z'/></svg>");
+  --md-admonition-icon--admonish-tip: url("data:image/svg+xml;charset=utf-8,<svg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 24 24'><path d='M17.66 11.2c-.23-.3-.51-.56-.77-.82-.67-.6-1.43-1.03-2.07-1.66C13.33 7.26 13 4.85 13.95 3c-.95.23-1.78.75-2.49 1.32-2.59 2.08-3.61 5.75-2.39 8.9.04.1.08.2.08.33 0 .22-.15.42-.35.5-.23.1-.47.04-.66-.12a.58.58 0 0 1-.14-.17c-1.13-1.43-1.31-3.48-.55-5.12C5.78 10 4.87 12.3 5 14.47c.06.5.12 1 .29 1.5.14.6.41 1.2.71 1.73 1.08 1.73 2.95 2.97 4.96 3.22 2.14.27 4.43-.12 6.07-1.6 1.83-1.66 2.47-4.32 1.53-6.6l-.13-.26c-.21-.46-.77-1.26-.77-1.26m-3.16 6.3c-.28.24-.74.5-1.1.6-1.12.4-2.24-.16-2.9-.82 1.19-.28 1.9-1.16 2.11-2.05.17-.8-.15-1.46-.28-2.23-.12-.74-.1-1.37.17-2.06.19.38.39.76.63 1.06.77 1 1.98 1.44 2.24 2.8.04.14.06.28.06.43.03.82-.33 1.72-.93 2.27z'/></svg>");
+  --md-admonition-icon--admonish-success: url("data:image/svg+xml;charset=utf-8,<svg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 24 24'><path d='m9 20.42-6.21-6.21 2.83-2.83L9 14.77l9.88-9.89 2.83 2.83L9 20.42z'/></svg>");
+  --md-admonition-icon--admonish-question: url("data:image/svg+xml;charset=utf-8,<svg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 24 24'><path d='m15.07 11.25-.9.92C13.45 12.89 13 13.5 13 15h-2v-.5c0-1.11.45-2.11 1.17-2.83l1.24-1.26c.37-.36.59-.86.59-1.41a2 2 0 0 0-2-2 2 2 0 0 0-2 2H8a4 4 0 0 1 4-4 4 4 0 0 1 4 4 3.2 3.2 0 0 1-.93 2.25M13 19h-2v-2h2M12 2A10 10 0 0 0 2 12a10 10 0 0 0 10 10 10 10 0 0 0 10-10c0-5.53-4.5-10-10-10z'/></svg>");
+  --md-admonition-icon--admonish-warning: url("data:image/svg+xml;charset=utf-8,<svg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 24 24'><path d='M13 14h-2V9h2m0 9h-2v-2h2M1 21h22L12 2 1 21z'/></svg>");
+  --md-admonition-icon--admonish-failure: url("data:image/svg+xml;charset=utf-8,<svg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 24 24'><path d='M20 6.91 17.09 4 12 9.09 6.91 4 4 6.91 9.09 12 4 17.09 6.91 20 12 14.91 17.09 20 20 17.09 14.91 12 20 6.91z'/></svg>");
+  --md-admonition-icon--admonish-danger: url("data:image/svg+xml;charset=utf-8,<svg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 24 24'><path d='M11 15H6l7-14v8h5l-7 14v-8z'/></svg>");
+  --md-admonition-icon--admonish-bug: url("data:image/svg+xml;charset=utf-8,<svg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 24 24'><path d='M14 12h-4v-2h4m0 6h-4v-2h4m6-6h-2.81a5.985 5.985 0 0 0-1.82-1.96L17 4.41 15.59 3l-2.17 2.17a6.002 6.002 0 0 0-2.83 0L8.41 3 7 4.41l1.62 1.63C7.88 6.55 7.26 7.22 6.81 8H4v2h2.09c-.05.33-.09.66-.09 1v1H4v2h2v1c0 .34.04.67.09 1H4v2h2.81c1.04 1.79 2.97 3 5.19 3s4.15-1.21 5.19-3H20v-2h-2.09c.05-.33.09-.66.09-1v-1h2v-2h-2v-1c0-.34-.04-.67-.09-1H20V8z'/></svg>");
+  --md-admonition-icon--admonish-example: url("data:image/svg+xml;charset=utf-8,<svg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 24 24'><path d='M7 13v-2h14v2H7m0 6v-2h14v2H7M7 7V5h14v2H7M3 8V5H2V4h2v4H3m-1 9v-1h3v4H2v-1h2v-.5H3v-1h1V17H2m2.25-7a.75.75 0 0 1 .75.75c0 .2-.08.39-.21.52L3.12 13H5v1H2v-.92L4 11H2v-1h2.25z'/></svg>");
+  --md-admonition-icon--admonish-quote: url("data:image/svg+xml;charset=utf-8,<svg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 24 24'><path d='M14 17h3l2-4V7h-6v6h3M6 17h3l2-4V7H5v6h3l-2 4z'/></svg>");
+}
+
+:is(.admonition):is(.admonish-note) {
+  border-color: #448aff;
+}
+
+:is(.admonish-note) > :is(.admonition-title, summary.admonition-title) {
+  background-color: rgba(68, 138, 255, 0.1);
+}
+:is(.admonish-note) > :is(.admonition-title, summary.admonition-title)::before {
+  background-color: #448aff;
+  mask-image: var(--md-admonition-icon--admonish-note);
+  -webkit-mask-image: var(--md-admonition-icon--admonish-note);
+  mask-repeat: no-repeat;
+  -webkit-mask-repeat: no-repeat;
+  mask-size: contain;
+  -webkit-mask-repeat: no-repeat;
+}
+
+:is(.admonition):is(.admonish-abstract, .admonish-summary, .admonish-tldr) {
+  border-color: #00b0ff;
+}
+
+:is(.admonish-abstract, .admonish-summary, .admonish-tldr) > :is(.admonition-title, summary.admonition-title) {
+  background-color: rgba(0, 176, 255, 0.1);
+}
+:is(.admonish-abstract, .admonish-summary, .admonish-tldr) > :is(.admonition-title, summary.admonition-title)::before {
+  background-color: #00b0ff;
+  mask-image: var(--md-admonition-icon--admonish-abstract);
+  -webkit-mask-image: var(--md-admonition-icon--admonish-abstract);
+  mask-repeat: no-repeat;
+  -webkit-mask-repeat: no-repeat;
+  mask-size: contain;
+  -webkit-mask-repeat: no-repeat;
+}
+
+:is(.admonition):is(.admonish-info, .admonish-todo) {
+  border-color: #00b8d4;
+}
+
+:is(.admonish-info, .admonish-todo) > :is(.admonition-title, summary.admonition-title) {
+  background-color: rgba(0, 184, 212, 0.1);
+}
+:is(.admonish-info, .admonish-todo) > :is(.admonition-title, summary.admonition-title)::before {
+  background-color: #00b8d4;
+  mask-image: var(--md-admonition-icon--admonish-info);
+  -webkit-mask-image: var(--md-admonition-icon--admonish-info);
+  mask-repeat: no-repeat;
+  -webkit-mask-repeat: no-repeat;
+  mask-size: contain;
+  -webkit-mask-repeat: no-repeat;
+}
+
+:is(.admonition):is(.admonish-tip, .admonish-hint, .admonish-important) {
+  border-color: #00bfa5;
+}
+
+:is(.admonish-tip, .admonish-hint, .admonish-important) > :is(.admonition-title, summary.admonition-title) {
+  background-color: rgba(0, 191, 165, 0.1);
+}
+:is(.admonish-tip, .admonish-hint, .admonish-important) > :is(.admonition-title, summary.admonition-title)::before {
+  background-color: #00bfa5;
+  mask-image: var(--md-admonition-icon--admonish-tip);
+  -webkit-mask-image: var(--md-admonition-icon--admonish-tip);
+  mask-repeat: no-repeat;
+  -webkit-mask-repeat: no-repeat;
+  mask-size: contain;
+  -webkit-mask-repeat: no-repeat;
+}
+
+:is(.admonition):is(.admonish-success, .admonish-check, .admonish-done) {
+  border-color: #00c853;
+}
+
+:is(.admonish-success, .admonish-check, .admonish-done) > :is(.admonition-title, summary.admonition-title) {
+  background-color: rgba(0, 200, 83, 0.1);
+}
+:is(.admonish-success, .admonish-check, .admonish-done) > :is(.admonition-title, summary.admonition-title)::before {
+  background-color: #00c853;
+  mask-image: var(--md-admonition-icon--admonish-success);
+  -webkit-mask-image: var(--md-admonition-icon--admonish-success);
+  mask-repeat: no-repeat;
+  -webkit-mask-repeat: no-repeat;
+  mask-size: contain;
+  -webkit-mask-repeat: no-repeat;
+}
+
+:is(.admonition):is(.admonish-question, .admonish-help, .admonish-faq) {
+  border-color: #64dd17;
+}
+
+:is(.admonish-question, .admonish-help, .admonish-faq) > :is(.admonition-title, summary.admonition-title) {
+  background-color: rgba(100, 221, 23, 0.1);
+}
+:is(.admonish-question, .admonish-help, .admonish-faq) > :is(.admonition-title, summary.admonition-title)::before {
+  background-color: #64dd17;
+  mask-image: var(--md-admonition-icon--admonish-question);
+  -webkit-mask-image: var(--md-admonition-icon--admonish-question);
+  mask-repeat: no-repeat;
+  -webkit-mask-repeat: no-repeat;
+  mask-size: contain;
+  -webkit-mask-repeat: no-repeat;
+}
+
+:is(.admonition):is(.admonish-warning, .admonish-caution, .admonish-attention) {
+  border-color: #ff9100;
+}
+
+:is(.admonish-warning, .admonish-caution, .admonish-attention) > :is(.admonition-title, summary.admonition-title) {
+  background-color: rgba(255, 145, 0, 0.1);
+}
+:is(.admonish-warning, .admonish-caution, .admonish-attention) > :is(.admonition-title, summary.admonition-title)::before {
+  background-color: #ff9100;
+  mask-image: var(--md-admonition-icon--admonish-warning);
+  -webkit-mask-image: var(--md-admonition-icon--admonish-warning);
+  mask-repeat: no-repeat;
+  -webkit-mask-repeat: no-repeat;
+  mask-size: contain;
+  -webkit-mask-repeat: no-repeat;
+}
+
+:is(.admonition):is(.admonish-failure, .admonish-fail, .admonish-missing) {
+  border-color: #ff5252;
+}
+
+:is(.admonish-failure, .admonish-fail, .admonish-missing) > :is(.admonition-title, summary.admonition-title) {
+  background-color: rgba(255, 82, 82, 0.1);
+}
+:is(.admonish-failure, .admonish-fail, .admonish-missing) > :is(.admonition-title, summary.admonition-title)::before {
+  background-color: #ff5252;
+  mask-image: var(--md-admonition-icon--admonish-failure);
+  -webkit-mask-image: var(--md-admonition-icon--admonish-failure);
+  mask-repeat: no-repeat;
+  -webkit-mask-repeat: no-repeat;
+  mask-size: contain;
+  -webkit-mask-repeat: no-repeat;
+}
+
+:is(.admonition):is(.admonish-danger, .admonish-error) {
+  border-color: #ff1744;
+}
+
+:is(.admonish-danger, .admonish-error) > :is(.admonition-title, summary.admonition-title) {
+  background-color: rgba(255, 23, 68, 0.1);
+}
+:is(.admonish-danger, .admonish-error) > :is(.admonition-title, summary.admonition-title)::before {
+  background-color: #ff1744;
+  mask-image: var(--md-admonition-icon--admonish-danger);
+  -webkit-mask-image: var(--md-admonition-icon--admonish-danger);
+  mask-repeat: no-repeat;
+  -webkit-mask-repeat: no-repeat;
+  mask-size: contain;
+  -webkit-mask-repeat: no-repeat;
+}
+
+:is(.admonition):is(.admonish-bug) {
+  border-color: #f50057;
+}
+
+:is(.admonish-bug) > :is(.admonition-title, summary.admonition-title) {
+  background-color: rgba(245, 0, 87, 0.1);
+}
+:is(.admonish-bug) > :is(.admonition-title, summary.admonition-title)::before {
+  background-color: #f50057;
+  mask-image: var(--md-admonition-icon--admonish-bug);
+  -webkit-mask-image: var(--md-admonition-icon--admonish-bug);
+  mask-repeat: no-repeat;
+  -webkit-mask-repeat: no-repeat;
+  mask-size: contain;
+  -webkit-mask-repeat: no-repeat;
+}
+
+:is(.admonition):is(.admonish-example) {
+  border-color: #7c4dff;
+}
+
+:is(.admonish-example) > :is(.admonition-title, summary.admonition-title) {
+  background-color: rgba(124, 77, 255, 0.1);
+}
+:is(.admonish-example) > :is(.admonition-title, summary.admonition-title)::before {
+  background-color: #7c4dff;
+  mask-image: var(--md-admonition-icon--admonish-example);
+  -webkit-mask-image: var(--md-admonition-icon--admonish-example);
+  mask-repeat: no-repeat;
+  -webkit-mask-repeat: no-repeat;
+  mask-size: contain;
+  -webkit-mask-repeat: no-repeat;
+}
+
+:is(.admonition):is(.admonish-quote, .admonish-cite) {
+  border-color: #9e9e9e;
+}
+
+:is(.admonish-quote, .admonish-cite) > :is(.admonition-title, summary.admonition-title) {
+  background-color: rgba(158, 158, 158, 0.1);
+}
+:is(.admonish-quote, .admonish-cite) > :is(.admonition-title, summary.admonition-title)::before {
+  background-color: #9e9e9e;
+  mask-image: var(--md-admonition-icon--admonish-quote);
+  -webkit-mask-image: var(--md-admonition-icon--admonish-quote);
+  mask-repeat: no-repeat;
+  -webkit-mask-repeat: no-repeat;
+  mask-size: contain;
+  -webkit-mask-repeat: no-repeat;
+}
+
+.navy :is(.admonition) {
+  background-color: var(--sidebar-bg);
+}
+
+.ayu :is(.admonition),
+.coal :is(.admonition) {
+  background-color: var(--theme-hover);
+}
+
+.rust :is(.admonition) {
+  background-color: var(--sidebar-bg);
+  color: var(--sidebar-fg);
+}
+.rust .admonition-anchor-link:link, .rust .admonition-anchor-link:visited {
+  color: var(--sidebar-fg);
+}
diff --git a/tvix/docs/mdbook-extra.css b/tvix/docs/mdbook-extra.css
new file mode 100644
index 000000000000..7a50fdbeed68
--- /dev/null
+++ b/tvix/docs/mdbook-extra.css
@@ -0,0 +1,7 @@
+@charset "utf-8";
+
+.hljs-meta.prompt_ {
+	-webkit-user-select: none;
+	-moz-user-select: none;
+	user-select: none;
+}
diff --git a/tvix/docs/mdbook-extra.js b/tvix/docs/mdbook-extra.js
new file mode 100644
index 000000000000..bff412e69eb2
--- /dev/null
+++ b/tvix/docs/mdbook-extra.js
@@ -0,0 +1,24 @@
+"use strict"
+
+function main() {
+	document.removeEventListener("DOMContentLoaded", main, false)
+
+	// NOTE: this is a hacky solution but should do the job for making a
+	// listenable audio link
+	function playAudio(evt) {
+		evt.preventDefault()
+		var audio = document.createElement("audio")
+	audio.addEventListener("ended", function() { audio.remove() }, false)
+		audio.addEventListener("loadeddata", function() { audio.play() }, false)
+		audio.src = evt.target.href
+		audio.load()
+	}
+
+	var audios = document.querySelectorAll("a[href^=\"data:audio/\"]")
+	Array.prototype.forEach.call(audios, function setupAudio(elem) {
+		elem.setAttribute("role", "button")
+		elem.addEventListener("click", playAudio, false)
+	})
+}
+
+document.addEventListener("DOMContentLoaded", main, false)
diff --git a/tvix/docs/src/SUMMARY.md b/tvix/docs/src/SUMMARY.md
index 954abae338ba..cce6d8966df6 100644
--- a/tvix/docs/src/SUMMARY.md
+++ b/tvix/docs/src/SUMMARY.md
@@ -1,14 +1,49 @@
 # Summary
 
+# Welcome
+* [Introduction](./introduction.md)
+* [Community](./community.md)
+* [Getting Started](./getting-started.md)
+
+# Contributing
+* [Gerrit](./contributing/gerrit.md)
+* [Email](./contributing/email.md)
+* [Code & Commits](./contributing/code-&-commits.md)
+
 # Tvix
 - [Architecture & data flow](./architecture.md)
 - [TODOs](./TODO.md)
 
+# Evaluator
+- [Compilation of Bindings](./eval/bindings.md)
+- [Builtins](./eval/builtins.md)
+- [Build References](./eval/build-references.md)
+- [Catchable Errors](./eval/catchable-errors.md)
+- [Known Optimisation Potential](./eval/known-optimisation-potential.md)
+- [Language Issues](./eval/language-issues.md)
+- [Attrset Opcodes](./eval/opcodes-attrsets.md)
+- [Recursive attribute sets](./eval/recursive-attrs.md)
+- [VM Loop](./eval/vm-loop.md)
+- [Abandoned](./eval/abandoned/index.md)
+  - [Thread-local VM](./eval/abandoned/thread-local-vm.md)
+
+# Store
+- [Store API](./store/api.md)
+- [BlobStore Chunking](./castore/blobstore-chunking.md)
+- [BlobStore Protocol](./castore/blobstore-protocol.md)
+- [Data Model](./castore/data-model.md)
+- [Why not git trees?](./castore/why-not-git-trees.md)
+
+# Builder
+- [Build API](./build/index.md)
+
 # Nix
 - [Specification of the Nix Language](./language-spec.md)
 - [Nix language version history](./lang-version.md)
 - [Value Pointer Equality](./value-pointer-equality.md)
-- [Daemon protocol changelog](./nix-daemon/changelog.md)
-- [Daemon protocol logging](./nix-daemon/logging.md)
-- [Daemon protocol operations](./nix-daemon/operations.md)
-- [Daemon protocol serialization](./nix-daemon/serialization.md)
\ No newline at end of file
+- [Daemon Protocol](./nix-daemon/index.md)
+  - [Handshake](./nix-daemon/handshake.md)
+  - [Logging](./nix-daemon/logging.md)
+  - [Operations](./nix-daemon/operations.md)
+  - [Serialization](./nix-daemon/serialization.md)
+  - [Changelog](./nix-daemon/changelog.md)
diff --git a/tvix/docs/src/TODO.md b/tvix/docs/src/TODO.md
index 8fb22ea82295..af558c9580fe 100644
--- a/tvix/docs/src/TODO.md
+++ b/tvix/docs/src/TODO.md
@@ -25,17 +25,75 @@ sure noone is working on this, or has some specific design in mind already.
    with a different level of `--strict`, but the toplevel doc-comment suggests
   it's generic?
 
+### Correctness > Performance
+A lot of the Nix behaviour isn't well documented, and before going too deep
+into performance optimizations, we need to ensure we have properly grasped all
+hidden features. Otherwise we risk doing a lot of perf-related overall
+architecture work, and adding code complexity, based on a mental model that
+gets disproved later on as we work towards correctness.
+
+We do this by evaluating more and more parts of the official Nix test suite, as
+well as our own Tvix test suite, and comparing the output with Nix's.
+
+Additionally, we evaluate attributes from nixpkgs, compare calculated output
+paths (to determine equivalence of evaluated A-Terms) and fix differences as we
+encounter them.
+
+This currently is a very manual and time-consuming process, both in terms of
+setup and in spotting the source of the differences (and "compensating" for
+the resulting diff noise on mismatches).
+
+ - We could use some better tooling that periodically evaluates nixpkgs, and
+   compares the output paths with the ones produced by Nix.
+ - We could use some better tooling that can spot the (real) differences between
+   two (graphs of) derivations, while removing all noise from the diff in the
+   resulting store paths.
+
+
+### Performance
+Even while keeping in mind some of the above caveats, there are some obvious
+low-hanging fruits that could have a good impact on performance, with somewhat
+limited risk of becoming obsolete in case of behavioural changes required for
+correctness:
+
+ - String Contexts currently do a lot of indirections (edef)
+   (NixString -> NixStringInner -> HashSet[element] -> NixContextElement -> String -> data)
+   to get to the actual data. We should improve this. There are various ideas;
+   one of them is globally interning all Nix context elements and only keeping
+   indices into that (see the sketch after this list). We might need different
+   representations for small and large numbers of context elements, and tooling
+   to reason about the number of contexts we have.
+ - To calculate NAR size and digest (used for output path calculation of FODs),
+   our current `SimpleRenderer` `NarCalculationService` sequentially asks for
+   one blob after another (and internally these might consist of multiple
+   chunks too).
+   That's a lot of roundtrips, adding up to a lot of useless waiting.
+   While we cannot avoid having to feed all bytes sequentially through sha256,
+   we already know which blobs to fetch and in which order.
+   There should be a way to buffer some "amount of upcoming bytes" in memory,
+   rather than requesting them sequentially.
+   This is somewhat the "spiritual counterpart" to our ingestion code
+   (`ConcurrentBlobUploader`, used by `ingest_nar`), which keeps
+   "some amount of outgoing bytes" in memory.
+   This is somewhat blocked until the {Chunk/Blob}Service split is done, as then
+   prefetching would only be a matter of adding it into the one `BlobReader`.
+
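+A minimal sketch of the interning idea (names and shapes here are
+illustrative, not actual tvix-eval API):
+
+```rust
+use std::collections::HashMap;
+use std::sync::{Mutex, OnceLock};
+
+// Hypothetical global interner: each distinct context element is stored
+// once, and NixString values would only keep small indices into it.
+static INTERNER: OnceLock<Mutex<Interner>> = OnceLock::new();
+
+#[derive(Default)]
+struct Interner {
+    elements: Vec<String>, // stand-in for NixContextElement
+    indices: HashMap<String, u32>,
+}
+
+fn intern(elem: &str) -> u32 {
+    let mut i = INTERNER.get_or_init(Default::default).lock().unwrap();
+    if let Some(&idx) = i.indices.get(elem) {
+        return idx;
+    }
+    let idx = i.elements.len() as u32;
+    i.elements.push(elem.to_owned());
+    i.indices.insert(elem.to_owned(), idx);
+    idx
+}
+```
+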
 ### Error cleanup
  - Currently, all services use tvix_castore::Error, which only has two kinds
    (invalid request, storage error), containing an (owned) string.
    This is quite primitive. We should have individual error types for BS, DS, PS.
    Maybe these should have some generics to still be able to carry errors from
    the underlying backend, similar to `IngestionError`.
+   There was an attempt to give PS separate error types (cl/11695), but this
+   ended up very verbose.
+   Every error had to be boxed, and a possible additional message added. Some
+   errors that didn't wrap another underlying error were hard to construct,
+   too (requiring the addition of extra error variants). All of this without
+   even having added proper backtrace support, which would be quite helpful
+   in store hierarchies.
+   `anyhow`'s `.context()` gives us most of this out of the box. Maybe we can
+   use that, using enums rather than `&'static str` as context in some cases?
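+
+   A rough sketch of that idea (the context enum and the call site are made
+   up for illustration):
+
+   ```rust
+   use anyhow::{Context, Result};
+
+   /// Hypothetical context tag naming the store layer an error came from.
+   #[derive(Debug)]
+   enum StoreCtx {
+       BlobService,
+       DirectoryService,
+       PathInfoService,
+   }
+
+   impl std::fmt::Display for StoreCtx {
+       fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+           write!(f, "in {:?}", self)
+       }
+   }
+
+   fn get_blob() -> Result<Vec<u8>> {
+       // The backend's io::Error gets wrapped with typed context, instead
+       // of a hand-rolled boxed error enum per service.
+       std::fs::read("/tmp/some-blob").context(StoreCtx::BlobService)
+   }
+   ```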
 
 ## Fixes towards correctness
- - `builtins.toXML` is missing string context. See b/398.
- - `builtins.toXML` self-closing tags need to be configurable in a more granular
-   fashion, requires third-party crate support. See b/399.
  - `rnix` only supports string source files, but `NixString` uses bytes (and Nix
   source code might not be valid UTF-8).
 
@@ -63,22 +121,12 @@ Extend the other pages in here. Some ideas on what should be tackled:
 
 ## Features
 
-### CLI
- - `nix repl` can set variables and effectively mutates a global scope. We
-  should update the existing / add another repl that allows the same. We don't
-  want to mutate the evaluator, but should construct a new one, passing in the
-  root scope returned from the previous evaluation.
-
 ### Fetchers
 Some more fetcher-related builtins need work:
  - `fetchGit`
  - `fetchTree` (hairy, seems there's no proper spec and the URL syntax seems
    subject to change/underdocumented)
 
-### Convert builtins:fetchurl to Fetches
-We need to convert `builtins:fetchurl`-style calls to `builtins.derivation` to
-fetches, not Derivations (tracked in `KnownPaths`).
-
 ### Derivation -> Build
 While we have some support for `structuredAttrs` and `fetchClosure` (at least
 enough to calculate output hashes, aka produce identical ATerm), the code
@@ -96,14 +144,44 @@ there are ruled out, adding other types of builders might be interesting.
  - gVisor
  - Cloud Hypervisor (using similar technique as `//tvix//boot`).
 
-Long-term, we want to extend traits and gRPC protocol to expose more telemetry,
-logs etc, but this is something requiring a lot of designing.
+Long-term, we want to extend the traits and the gRPC protocol.
+This requires some more designing. Some goals:
+
+ - use stricter castore types (and maybe stricter build types) instead of
+   proto types, add conversion code where necessary
+ - (more granular) control while a build is happening
+ - expose more telemetry and logs
+
 
 ### Store composition
  - Combinators: list-by-priority, first-come-first-serve, cache
- - How do describe hierarchies. URL format too one-dimensional, but we might get
-   quite far with a similar "substituters" concept that Nix uses, to construct
-   the composed stores.
+ - Store composition hierarchies (@yuka).
+   - URL format too one-dimensional.
+   - We want a nice and simple user-facing substituter config, including
+     sensible default wrappers for caching, retries, fallbacks, as well as
+     granular control for power-users.
+   - Current design idea (a rough config sketch follows this list):
+     - Have a concept similar to rclone config (a map with store aliases as
+       keys, allowing other parts of the config to refer to stores by their
+       alias).
+       It allows both referring to stores by name, as well as ad-hoc
+       definition: https://rclone.org/docs/#syntax-of-remote-paths
+     - Each store needs to be aware of its "instance name", so it can be
+       included in logs, metrics, …
+     - Have an "instantiation function" traversing such a config data
+       structure, creating store instances and plugging them together,
+       ultimately returning a dyn …Service interface.
+     - No reconfiguration/reconciliation for now
+     - Making URLs the primary data format would get ugly quite easily (hello
+       multiple layers of escaping!), so best to convert the existing URL
+       syntax to our new config format on the fly and then use one codepath
+       to instantiate/assemble. Similarly, something like the "user-facing
+       substituter config" mentioned above could also be converted to such a
+       config format under the hood.
+     - Maybe add a ?cache=$other_url parameter support to the URL syntax, to
+       easily wrap a store with a caching frontend, using $other_url as the
+       "near" store URL.
+
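+A rough sketch of the config shape described above (aliases, backend names
+and options are all made up for illustration):
+
+```rust
+use std::collections::HashMap;
+
+/// One named store: a backend type plus its options. Options may refer to
+/// other stores by their alias, rclone-style.
+struct StoreConfig {
+    backend: String,                  // e.g. "grpc", "objectstore", "cache"
+    options: HashMap<String, String>, // e.g. {"near": "local"}
+}
+
+/// The whole composition config: store aliases mapped to definitions.
+/// An instantiation function would traverse this map, create each backend,
+/// resolve alias references, and return e.g. a Box<dyn BlobService> for
+/// the requested entry point.
+type CompositionConfig = HashMap<String, StoreConfig>;
+```
+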
 ### Store Config
    There's already serde for some store options (bigtable uses `serde_qs`).
    We might also have common options global over all backends, like chunking
@@ -114,29 +192,36 @@ logs etc, but this is something requiring a lot of designing.
 ### BlobService
  - On the trait side, currently there's no way to distinguish reading a
    known-chunk vs blob, so we might be calling `.chunks()` unnecessarily often.
-   At least for the `object_store` backend, this might be a problem.
+   At least for the `object_store` backend, this might be a problem, causing a
+   lot of round-trips. It also doesn't compose well - every implementation of
+   `BlobService` needs to solve both the "holding metadata about chunking info"
+   and the "storing chunks" questions.
+   Design idea (@flokli): split these two concerns into two separate traits:
+    - a `ChunkService` dealing with retrieving individual chunks, by their
+      content digests. Chunks are small enough to keep around in contiguous
+      memory.
+    - a `BlobService` storing metadata about blobs.
+
+   Individual stores would not need to implement `BlobReader` anymore, but that
+   could be a global thing with access to the whole store composition layer,
+   which should make it easier to reuse chunks from other backends. Unclear
+   if the write path should be structured the same way. At least for some
+   backends, we want the remote end to be able to decide about chunking.
+
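+   A sketch of that split (trait names from the idea above, signatures
+   illustrative and simplified to be synchronous):
+
+   ```rust
+   type B3Digest = [u8; 32]; // assumption: blake3 content digests
+
+   /// Retrieves individual chunks by their content digest. Chunks are
+   /// small enough to keep around in contiguous memory.
+   trait ChunkService {
+       fn get_chunk(&self, digest: &B3Digest) -> std::io::Result<Option<Vec<u8>>>;
+   }
+
+   /// Stores metadata about blobs: the (chunk digest, chunk size) list
+   /// each blob consists of.
+   trait BlobService {
+       fn chunks(&self, blob: &B3Digest) -> std::io::Result<Option<Vec<(B3Digest, u64)>>>;
+   }
+   ```
+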
  - While `object_store` recently got support for `Content-Type`
    (https://github.com/apache/arrow-rs/pull/5650), there's no support on the
    local filesystem yet. We'd need to add support to this (through xattrs).
 
-### DirectoryService
- - Add an `object_store` variant, storing a Directory *closure* keyed by the
-   root `Directory` digest. This won't allow indexing intermediate Directory
-   nodes, but once we have `DirectoryService` composition, it shouldn't be an
-   issue.
- - [redb](https://www.redb.org/) backend
-
 ### PathInfoService
- - [redb](https://www.redb.org/) backend
  - sqlite backend (different schema than the Nix one, we need the root nodes data!)
 
 ### Nix Daemon protocol
 - Some work ongoing on the worker operation parsing (griff, picnoir)
 
 ### O11Y
- - gRPC trace propagation (cl/10532)
- - `tracing-tracy` (cl/10952)
- - `[tracing-]indicatif` for progress/log reporting (floklis stash)
- - unification into `tvix-tracing` crate, currently a lot of boilerplate
-   in `tvix-store` CLI entrypoint, and half of the boilerplate copied over to
-   `tvix-cli`.
+ - Maybe drop `--log-level` entirely, and use the `RUST_LOG` env var
+   exclusively? `debug`,`trace` level across all crates is a bit useless, and
+   `RUST_LOG` can be much more granular…
+ - Trace propagation for object_store, once they support a way to register a
+   middleware; we could then use that to inject a tracing middleware.
+   https://github.com/apache/arrow-rs/issues/5990
diff --git a/tvix/docs/src/architecture.md b/tvix/docs/src/architecture.md
index 5e0aa95f1a08..02ffdfdcd2b0 100644
--- a/tvix/docs/src/architecture.md
+++ b/tvix/docs/src/architecture.md
@@ -21,6 +21,15 @@ gRPC. The rest of this document outlines the components.
 
 ### Coordinator
 
+```admonish warning
+Currently there's no separate coordinator. Most of the interaction between
+store, builder and evaluator is done by library code living in the `tvix-glue`
+crate (and `tvix-cli` is a user of it).
+
+Keep in mind some of the statements below are outdated and neither reflect
+reality nor desired design anymore.
+```
+
 *Purpose:* The coordinator (in the simplest case, the Tvix CLI tool)
 oversees the flow of a build process and delegates tasks to the right
 subcomponents. For example, if a user runs the equivalent of
diff --git a/tvix/docs/src/build/index.md b/tvix/docs/src/build/index.md
new file mode 100644
index 000000000000..cf9580c98a82
--- /dev/null
+++ b/tvix/docs/src/build/index.md
@@ -0,0 +1,59 @@
+# Builder Protocol
+
+The builder protocol is used by tvix-glue to trigger builds.
+
+One goal of the protocol is to not be too tied to the Nix implementation itself,
+allowing it to be used for other builds/workloads in the future.
+
+This means the builder protocol is versatile enough to express the environment a
+Nix build expects, while not being aware of "what any of this means".
+
+For example, it is not aware of how certain environment variables are set in a
+Nix build, but allows specifying environment variables that should be set.
+
+It's also not aware of what Nix store paths are. Instead, it allows:
+
+ - specifying a list of paths expected to be produced during the build
+ - specifying a list of castore root nodes to be present in a specified
+   `inputs_dir`.
+ - specifying which paths are writable during the build.
+
+In case all specified paths are produced, and the command specified in
+`command_args` succeeds, the build is considered to be successful.
+
+This happens to be sufficient to *also* express how Nix builds work.
+
+Check `build/protos/build.proto` for a detailed description of the individual
+fields, and the tests in `glue/src/tvix_build.rs` for some examples.
+
+The following sections describe some aspects of Nix builds, and how this is
+(planned to be) implemented with the Tvix Build protocol.
+
+## Reference scanning
+At the end of a build, Nix scans each produced store path for references to
+other store paths (*out of the set of all store paths present during the
+build*). It does this by (only) looking for a list of nixbase32-encoded hashes
+in filenames (?), symlink targets and blob contents.
+
+While we could do this entirely outside the builder protocol, it'd mean a build
+client would be required to download the produced outputs locally, and do the
+refscan there. This is undesirable, as the builder already has all produced
+outputs locally, and it'd make more sense for it to do it.
+
+Instead, we want to describe reference scanning in a generic fashion.
+
+One proposed way to do this is to add an additional field `refscan_needles` to
+the `BuildRequest` message.
+If this is a non-empty list, all paths in `outputs` are scanned for these.
+
+The `Build` response message would then be extended with an `outputs_needles`
+field, containing the same number of elements as the existing `outputs` field.
+In there, we'd have a list of numbers, indexing into the `refscan_needles`
+originally specified.
+
+For Nix, `refscan_needles` would be populated with the nixbase32 hash parts of
+every input store path and output store path. The latter is necessary to scan
+for references between multi-output derivations.
+
+This is sufficient to construct the referred store paths in each build output on
+the build client.
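+
+In Rust terms, the proposed extension could look roughly like this (a sketch,
+not the actual protobuf schema):
+
+```rust
+/// Hypothetical addition to the BuildRequest message: strings to scan all
+/// output paths for (for Nix: the nixbase32 hash parts of all input and
+/// output store paths).
+struct BuildRequestExt {
+    refscan_needles: Vec<String>,
+}
+
+/// Hypothetical addition to the Build response message: one entry per
+/// element of `outputs`, each listing the indices into `refscan_needles`
+/// found in that output.
+struct BuildExt {
+    outputs_needles: Vec<Vec<u64>>,
+}
+```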
diff --git a/tvix/castore/docs/blobstore-chunking.md b/tvix/docs/src/castore/blobstore-chunking.md
index 49bbe6927554..d8c3d54b52f0 100644
--- a/tvix/castore/docs/blobstore-chunking.md
+++ b/tvix/docs/src/castore/blobstore-chunking.md
 This means one only needs the root digest to validate a construction, and the
 constructions can be sent [separately][bao-spec].
 
 This relieves us from the need of having to encode more granular chunking into
-our data model / identifier upfront, but can make this a mostly a transport/
+our data model / identifier upfront, but can make this mostly a transport/
 storage concern.
 
-For the some more description on the (remote) protocol, check
-`./blobstore-protocol.md`.
+For some more description on the (remote) protocol, check
+[BlobStore Protocol](./blobstore-protocol.md).
 
 #### Logical vs. physical chunking
 
diff --git a/tvix/castore/docs/blobstore-protocol.md b/tvix/docs/src/castore/blobstore-protocol.md
index 048cafc3d877..0dff787ccb00 100644
--- a/tvix/castore/docs/blobstore-protocol.md
+++ b/tvix/docs/src/castore/blobstore-protocol.md
@@ -41,7 +41,7 @@ It also puts very little requirements on someone implementing a new
 The gRPC protocol is documented in `../protos/rpc_blobstore.proto`.
 Contrary to the `BlobService` trait, it does not have any options for seeking/
 ranging, as it's more desirable to provide this through chunking (see also
-`./blobstore-chunking.md`).
+[BlobStore Chunking](./blobstore-chunking.md).
 
 ## Composition
 Different `BlobStore` are supposed to be "composed"/"layered" to express
 an additional field in the response, which would allow clients to
 populate their local chunk store in a single roundtrip.
 
 ## Verified Streaming
-As already described in `./docs/blobstore-chunking.md`, the physical chunk
+As already described in [BlobStore Chunking](./blobstore-chunking.md), the physical chunk
 information sent in a `BlobService.Stat()` response is still sufficient to fetch
 in an authenticated fashion.
 
diff --git a/tvix/castore/docs/data-model.md b/tvix/docs/src/castore/data-model.md
index 2df6761aae8f..7f7e396a2267 100644
--- a/tvix/castore/docs/data-model.md
+++ b/tvix/docs/src/castore/data-model.md
@@ -2,7 +2,7 @@
 
 This provides some more notes on the fields used in castore.proto.
 
-See `//tvix/store/docs/api.md` for the full context.
+See [Store API](../store/api.md) for the full context.
 
 ## Directory message
 `Directory` messages use the blake3 hash of their canonical protobuf
@@ -15,8 +15,8 @@ a directory.
 
 All three message types have a `name` field, specifying the (base)name of the
 element (which MUST not contain slashes or null bytes, and MUST not be '.' or '..').
-For reproducibility reasons, the lists MUST be sorted by that name and also
-MUST be unique across all three lists.
+For reproducibility reasons, the lists MUST be sorted by that name and the
+name MUST be unique across all three lists.
 
 In addition to the `name` field, the various *Node messages have the following
 fields:
@@ -27,7 +27,7 @@ A `DirectoryNode` message represents a child directory.
 It has a `digest` field, which points to the identifier of another `Directory`
 message, making a `Directory` a merkle tree (or strictly speaking, a graph, as
 two elements pointing to a child directory with the same contents would point
-to the same `Directory` message.
+to the same `Directory` message).
 
 There's also a `size` field, containing the (total) number of all child
 elements in the referenced `Directory`, which helps for inode calculation.
diff --git a/tvix/castore/docs/why-not-git-trees.md b/tvix/docs/src/castore/why-not-git-trees.md
index fd46252cf55c..4a12b4ef5554 100644
--- a/tvix/castore/docs/why-not-git-trees.md
+++ b/tvix/docs/src/castore/why-not-git-trees.md
@@ -48,7 +48,7 @@ The git tree object format uses sha1 both for references to other trees and
 hashes of blobs, which isn't really a hash function to fundamentally base
 everything on in 2023.
 The [migration to sha256][git-sha256] also has been dead for some years now,
-and it's unclear how a "blake3" version of this would even look like.
+and it's unclear what a "blake3" version of this would even look like.
 
 [bao]: https://github.com/oconnor663/bao
 [blake3]: https://github.com/BLAKE3-team/BLAKE3
diff --git a/tvix/docs/src/community.md b/tvix/docs/src/community.md
new file mode 100644
index 000000000000..9ab117c552f3
--- /dev/null
+++ b/tvix/docs/src/community.md
@@ -0,0 +1,23 @@
+# Community
+
+## Chatroom
+
+Tvix development discussions happen on IRC. We use the [hackint][] IRC network,
+which you should be able to join using your favorite client/protocol.
+
+* [IRC][] / [Webchat][]
+* [Matrix][]
+* [XMPP][]
+
+## Mailing list
+
+Discussions on larger architectural problems and thoughts occasionally happen
+on the [TVL Public Inbox][public inbox] mailing list.
+
+[hackint]: https://hackint.org/
+[IRC]: ircs://irc.hackint.org:6697/#tvix-dev
+[Webchat]: https://webirc.hackint.org/#ircs://irc.hackint.org/#tvix-dev
+[Matrix]: matrix:r/tvix-dev:hackint.org?action=join
+[XMPP]: xmpp:#tvix-dev@irc.hackint.org?join
+[depot@tvl.su]: mailto:depot@tvl.su
+[public inbox]: https://inbox.tvl.su/depot/
diff --git a/tvix/docs/src/contributing/code-&-commits.md b/tvix/docs/src/contributing/code-&-commits.md
new file mode 100644
index 000000000000..628c124bf12f
--- /dev/null
+++ b/tvix/docs/src/contributing/code-&-commits.md
@@ -0,0 +1,76 @@
+# Code & Commits
+
+## Code quality
+
+This one should go without saying, but please ensure that your code quality
+does not fall below the rest of the project. This is of course very subjective,
+but as an example: if you place code that throws away errors into a block in
+which errors are handled properly, your change will be rejected.
+
+
+```admonish hint
+Usually there is a strong correlation between the visual appearance of a code
+block and its quality. This is a simple way to sanity-check your work while
+squinting and keeping some distance from your screen ;-)
+```
+
+
+## Commit messages
+
+The [Angular Conventional Commits][angular] style is the general commit style
+used in the Tvix project. Commit messages should be structured like this:
+
+```admonish example
+    type(scope): Subject line with at most a 72 character length
+
+    Body of the commit message with an empty line between subject and
+    body. This text should explain what the change does and why it has
+    been made, *especially* if it introduces a new feature.
+
+    Relevant issues should be mentioned if they exist.
+```
+
+Where `type` can be one of:
+
+* `feat`: A new feature has been introduced
+* `fix`: An issue of some kind has been fixed
+* `docs`: Documentation or comments have been updated
+* `style`: Formatting changes only
+* `refactor`: Hopefully self-explanatory!
+* `test`: Added missing tests / fixed tests
+* `chore`: Maintenance work
+* `subtree`: Operations involving `git subtree`
+
+And `scope` should refer to some kind of logical grouping inside of the
+project.
+
+It does not make sense to include the full path unless it aids in
+disambiguating. For example, when changing the struct fields in
+`tvix/glue/src/builtins/fetchers.rs` it is enough to write
+`refactor(tvix/glue): โ€ฆ`.
+
+Please take a look at the existing commit log for examples.
+
+
+## Commit content
+
+Multiple changes should be divided into multiple git commits whenever possible.
+Common sense applies.
+
+The fix for a single-line whitespace issue is fine to include in a different
+commit. Introducing a new feature and refactoring (unrelated) code in the same
+commit is not fine.
+
+`git commit -a` is generally **taboo**; on the command line you should
+prefer `git commit -p` instead.
+
+
+```admonish tip
+Tooling can really help this process. The [lazygit][] TUI or [magit][] for
+Emacs are worth looking into.
+```
+
+
+[angular]: https://www.conventionalcommits.org/en/
+[lazygit]: https://github.com/jesseduffield/lazygit
+[magit]: https://magit.vc
diff --git a/tvix/docs/src/contributing/email.md b/tvix/docs/src/contributing/email.md
new file mode 100644
index 000000000000..238ff388f595
--- /dev/null
+++ b/tvix/docs/src/contributing/email.md
@@ -0,0 +1,33 @@
+# Submitting changes via email
+
+With SSO & local accounts, hopefully Tvix provides you with a low-friction or
+privacy-respecting way to make contributions by means of
+[TVL’s self-hosted Gerrit][gerrit]. However, if you still decide differently,
+you may submit a patch via email to `depot@tvl.su` where it will be added to
+Gerrit by a contributor.
+
+Please keep in mind this process is more complicated, requiring extra work from
+both us & you:
+
+* You will need to manually check the Gerrit website for updates & someone will
+  need to relay potential comments to/from Gerrit to you, as you won’t get
+  emails from Gerrit.
+* New revisions need to be stewarded by someone uploading changes to Gerrit
+  on your behalf.
+* As CLs cannot change owners, if you decide to get a Gerrit account later on,
+  existing CLs need to be abandoned and recreated. This introduces more churn
+  to the review process, since prior discussions are disconnected.
+
+Create an appropriate commit locally, then send it to us using either of these
+options:
+
+* `git format-patch`: This will create a `*.patch` file which you should email to
+  us.
+* `git send-email`: If configured on your system, this will take care of the
+  whole emailing process for you.
+
+The email address is a [public inbox][].
+
+
+[gerrit]: ../contributing/gerrit.html
+[public inbox]: https://inbox.tvl.su/depot/
diff --git a/tvix/docs/src/contributing/gerrit.md b/tvix/docs/src/contributing/gerrit.md
new file mode 100644
index 000000000000..3644e8cb0200
--- /dev/null
+++ b/tvix/docs/src/contributing/gerrit.md
@@ -0,0 +1,110 @@
+# Contributing to Tvix
+
+## Registration
+
+Self-hosted [Gerrit](https://www.gerritcodereview.com) & changelists (CLs) are
+the preferred method of contribution & review.
+
+TVL’s Gerrit supports single sign-on (SSO) using a GitHub, GitLab, or
+StackOverflow account.
+
+Additionally, if you would prefer not to use an SSO option or wish to have a
+backup authentication strategy in the event of a downed server or otherwise, we
+recommend setting up a TVL-specific LDAP account.
+
+You can create such an account by following these instructions:
+
+1. Check out [TVL’s monorepo][check-out-monorepo] if you haven’t already
+2. Be a member of `#tvix-dev` (and/or `#tvl`) on [hackint][], a communication
+   network.
+3. Generate a user entry using [//web/pwcrypt](https://signup.tvl.fyi/).
+4. Commit that generated user entry to our LDAP server configuration in
+   [ops/users][ops-users] (for an example, see:
+   [CL/2671](https://cl.tvl.fyi/c/depot/+/2671))
+5. If only using LDAP, submit the patch via email (see [<cite>Submitting
+   changes via email</cite>][email])
+
+
+## Gerrit setup
+
+Gerrit uses the concept of change IDs to track commits across rebases and other
+operations that might change their hashes, and link them to unique changes in
+Gerrit.
+
+First, [upload your public SSH keys to Gerrit][Gerrit SSH]. Then change your
+remote to point to your newly-registered user over SSH, set the default push
+URL, & install the commit hooks for a smoother Gerrit experience:
+
+```console
+$ cd depot
+$ git remote set-url origin "ssh://$USER@code.tvl.fyi:29418/depot"
+$ git config remote.origin.url "ssh://$USER@code.tvl.fyi:29418/depot"
+$ git config remote.origin.push "HEAD:refs/for/canon"
+$ curl -L --compressed https://cl.tvl.fyi/tools/hooks/commit-msg | tee .git/hooks/commit-msg
+…
+if ! mv "${dest}" "$1" ; then
+  echo "cannot mv ${dest} to $1"
+  exit 1
+fi
+$ chmod +x .git/hooks/commit-msg
+```
+
+## Gerrit workflow
+
+The workflow on Gerrit is quite different from the pull request (PR) model that
+many developers are likely more accustomed to. Instead of pushing changes
+to remote branches, all changes have to be pushed to `refs/for/canon`. For each
+commit that is pushed there, a change request is created automatically.
+Every time you create a new commit, the change hook will insert a unique
+`Change-Id` tag into the commit message. Once you are satisfied with the state
+of your commit and want to submit it for review, you push it to a Git `ref`
+called `refs/for/canon`. This designates the commits as changelists (CLs)
+targeted for the `canon` branch.
+
+After you are satisfied with your changes, push to the default:
+
+```console
+$ git commit -m 'docs(REVIEWS): Fixed all the errors in the reviews docs'
+$ git push origin
+```
+
+Or to a special target, such as a work-in-progress CL:
+
+```console
+$ git push origin HEAD:refs/for/canon%wip
+```
+
+During the review process, the reviewer(s) might ask you to make changes. You
+can simply amend[^amend] your commit(s) and push to the same ref (`--force*`
+flags not needed). Gerrit will automatically update your changes.
+
+```admonish caution
+Every individual commit will become a separate change. We do *not* squash
+related commits, but instead submit them one by one. Be aware that if you
+expect a different behavior and attempt something like an unsquashed subtree
+merge, you will produce a *lot* of CLs. This is strongly discouraged.
+```
+
+```admonish tip
+If you do not have experience with the Gerrit model, consider reading the
+[<cite>Working with Gerrit: An example</cite>][Gerrit Walkthrough] or
+[<cite>Basic Gerrit Walkthrough — For GitHub Users</cite>][github-diff].
+
+It will also be important to read about [attention sets][] to understand how
+your ‘turn’ works, how notifications will be distributed to users through the
+system, as well as the other [attention set rules][attention-set-rules].
+```
+
+
+[check-out-monorepo]: ./getting-started#tvl-monorepo
+[email]: ../contributing/email.html
+[Gerrit SSH]: https://cl.tvl.fyi/settings/#SSHKeys
+[Gerrit walkthrough]: https://gerrit-review.googlesource.com/Documentation/intro-gerrit-walkthrough.html
+[ops-users]: https://code.tvl.fyi/tree/ops/users/default.nix
+[hackint]: https://hackint.org
+[github-diff]: https://gerrit.wikimedia.org/r/Documentation/intro-gerrit-walkthrough-github.html
+[attention sets]: https://gerrit-review.googlesource.com/Documentation/user-attention-set.html
+[attention-set-rules]: https://gerrit-review.googlesource.com/Documentation/user-attention-set.html#_rules
+[^amend]: `git commit --amend`
diff --git a/tvix/docs/src/eval/abandoned/index.md b/tvix/docs/src/eval/abandoned/index.md
new file mode 100644
index 000000000000..1cef704d08d7
--- /dev/null
+++ b/tvix/docs/src/eval/abandoned/index.md
@@ -0,0 +1,3 @@
+# Abandoned ideas
+
+This chapter keeps track of abandoned ideas, and why they were abandoned.
diff --git a/tvix/eval/docs/abandoned/thread-local-vm.md b/tvix/docs/src/eval/abandoned/thread-local-vm.md
index c6a2d5e07e5c..c6a2d5e07e5c 100644
--- a/tvix/eval/docs/abandoned/thread-local-vm.md
+++ b/tvix/docs/src/eval/abandoned/thread-local-vm.md
diff --git a/tvix/eval/docs/bindings.md b/tvix/docs/src/eval/bindings.md
index 2b062cb13d87..4fb35b623580 100644
--- a/tvix/eval/docs/bindings.md
+++ b/tvix/docs/src/eval/bindings.md
@@ -1,5 +1,4 @@
-Compilation of bindings
-=======================
+# Compilation of bindings
 
 Compilation of Nix bindings is one of the most mind-bending parts of Nix
 evaluation. The implementation of just the compilation is currently almost 1000
@@ -62,7 +61,7 @@ This is done by compiling bindings in several phases:
 
    At the end of this phase, we know the stack slots of all namespaces for
    inheriting from, all values inherited from them, and all values (and
-   optionall keys) of bindings at the current level.
+   optionally keys) of bindings at the current level.
 
    Only statically known keys are actually merged, so any dynamic keys that
    conflict will lead to a "key already defined" error at runtime.
@@ -82,8 +81,10 @@ stack when the scope ends.
 
 ## Moving parts
 
-WARNING: This documents the *current* implementation. If you only care about the
+```admonish caution
+This documents the *current* implementation. If you only care about the
 conceptual aspects, see above.
+```
 
 There's a few types involved:
 
diff --git a/tvix/eval/docs/build-references.md b/tvix/docs/src/eval/build-references.md
index badcea11550e..dd53f65d83aa 100644
--- a/tvix/eval/docs/build-references.md
+++ b/tvix/docs/src/eval/build-references.md
@@ -1,5 +1,4 @@
-Build references in derivations
-===============================
+# Build references in derivations
 
 This document describes how build references are calculated in Tvix. Build
 references are used to determine which store paths should be available to a
@@ -23,8 +22,10 @@ formats:
    This format is used for a special case where a derivation attribute directly
    refers to a derivation path (e.g. by accessing `.drvPath` on a derivation).
 
-   Note: In C++ Nix this case is quite special and actually requires a
-   store-database query during evaluation.
+   ```admonish note
+   In C++ Nix this case is quite special and actually requires a store-database
+   query during evaluation.
+   ```
 
 3. `<path>` - a non-descript store path input, usually a plain source file (e.g.
    from something like `src = ./.` or `src = ./foo.txt`).
@@ -90,8 +91,10 @@ C++ Nix has several builtins that interface directly with string contexts:
 * `unsafeDiscardOutputDependency`: drops dependencies on the *outputs* of a
   `.drv` in the context, passing only the literal `.drv` itself
 
-  Note: This is only used for special test-cases in nixpkgs, and deprecated Nix
+  ```admonish note
+  This is only used for special test-cases in nixpkgs, and deprecated Nix
   commands like `nix-push`.
+  ```
 * `getContext`: returns the string context in serialised form as a Nix attribute
   set
 * `appendContext`: adds a given string context to the string in the same format
@@ -159,8 +162,10 @@ one evaluation should be created in Nix. This metadata needs to be available in
 These queries will need to be asked of the metadata when populating the
 derivation fields.
 
-Note: Depending on how we implement `builtins.placeholder`, it might be useful
+```admonish note
+Depending on how we implement `builtins.placeholder`, it might be useful
 to track created placeholders in this metadata, too.
+```
 
 ### Context builtins
 
diff --git a/tvix/eval/docs/builtins.md b/tvix/docs/src/eval/builtins.md
index dba4c48c65e1..d9fcd72ccab5 100644
--- a/tvix/eval/docs/builtins.md
+++ b/tvix/docs/src/eval/builtins.md
@@ -1,5 +1,4 @@
-Nix builtins
-============
+# Nix builtins
 
 Nix has a lot of built-in functions, some of which are accessible in
 the global scope, and some of which are only accessible through the
diff --git a/tvix/eval/docs/catchable-errors.md b/tvix/docs/src/eval/catchable-errors.md
index ce320a921777..ce320a921777 100644
--- a/tvix/eval/docs/catchable-errors.md
+++ b/tvix/docs/src/eval/catchable-errors.md
diff --git a/tvix/eval/docs/known-optimisation-potential.md b/tvix/docs/src/eval/known-optimisation-potential.md
index 0ab185fe1be2..11babcb59ac1 100644
--- a/tvix/eval/docs/known-optimisation-potential.md
+++ b/tvix/docs/src/eval/known-optimisation-potential.md
@@ -1,5 +1,4 @@
-Known Optimisation Potential
-============================
+# Known Optimisation Potential
 
 There are several areas of the Tvix evaluator code base where
 potentially large performance gains can be achieved through
diff --git a/tvix/eval/docs/language-issues.md b/tvix/docs/src/eval/language-issues.md
index 152e6594a1d0..152e6594a1d0 100644
--- a/tvix/eval/docs/language-issues.md
+++ b/tvix/docs/src/eval/language-issues.md
diff --git a/tvix/eval/docs/opcodes-attrsets.md b/tvix/docs/src/eval/opcodes-attrsets.md
index 7026f3319dda..7026f3319dda 100644
--- a/tvix/eval/docs/opcodes-attrsets.md
+++ b/tvix/docs/src/eval/opcodes-attrsets.md
diff --git a/tvix/eval/docs/recursive-attrs.md b/tvix/docs/src/eval/recursive-attrs.md
index c30cfd33e6c7..5ce1cb2b64ff 100644
--- a/tvix/eval/docs/recursive-attrs.md
+++ b/tvix/docs/src/eval/recursive-attrs.md
@@ -1,5 +1,4 @@
-Recursive attribute sets
-========================
+# Recursive attribute sets
 
 The construction behaviour of recursive attribute sets is very
 specific, and a bit peculiar.
diff --git a/tvix/eval/docs/vm-loop.md b/tvix/docs/src/eval/vm-loop.md
index 6266d34709cb..a75c7eec31df 100644
--- a/tvix/eval/docs/vm-loop.md
+++ b/tvix/docs/src/eval/vm-loop.md
@@ -1,5 +1,4 @@
-tvix-eval VM loop
-=================
+# tvix-eval VM loop
 
 This document describes the new tvix-eval VM execution loop implemented in the
 chain focusing around cl/8104.
diff --git a/tvix/docs/src/getting-started.md b/tvix/docs/src/getting-started.md
new file mode 100644
index 000000000000..1cbb6de7d4f7
--- /dev/null
+++ b/tvix/docs/src/getting-started.md
@@ -0,0 +1,59 @@
+# Getting Started
+
+## Getting the code, a developer shell, & building the CLI
+
+Tvix can be built with the Rust standard `cargo build`. A Nix shell is provided
+with the correctly-versioned tooling to build.
+
+### TVL monorepo
+
+```console
+$ git clone https://code.tvl.fyi/depot.git
+$ cd depot
+```
+
+[Direnv][] is highly recommended in order to enable [`mg`][mg], a tool for
+workflows in monorepos. Follow the [Direnv installation
+instructions][direnv-inst]; then, after it’s set up, continue with:
+
+```console
+$ direnv allow
+$ mg shell //tvix:shell
+$ cd tvix
+$ cargo build
+```
+
+### Or just Tvix
+
+At present, this option isn’t suitable for contributions & lacks the tooling of
+the monorepo, but still provides a `shell.nix` which can be used for building
+the Tvix project.
+
+```console
+$ git clone https://code.tvl.fyi/depot.git:workspace=views/tvix.git
+$ cd tvix
+$ nix-shell
+$ cargo build
+```
+
+
+## Builds & tests
+
+All projects are built using [Nix][] to avoid ‘build pollution’ via the user’s
+local environment.
+
+If you have Nix installed and are contributing to a project tracked in this
+repository, you can usually build the project by calling `nix-build -A
+path.to.project`.
+
+For example, to build a project located at `//tools/foo`, you would call
+`nix-build -A tools.foo`.
+
+If the project has tests, check that they still work before submitting your
+change.
+
+
+[Direnv]: https://direnv.net
+[direnv-inst]: https://direnv.net/docs/installation.html
+[Nix]: https://nixos.org/nix/
+[mg]: https://code.tvl.fyi/tree/tools/magrathea
diff --git a/tvix/docs/src/introduction.md b/tvix/docs/src/introduction.md
new file mode 100644
index 000000000000..744fbeec9fbe
--- /dev/null
+++ b/tvix/docs/src/introduction.md
@@ -0,0 +1,23 @@
+# Introduction
+
+Tvix (\[tvɪks\], [🔈][pronunciation]) is a new Rust implementation of the
+components of the [Nix package manager][Nix].
+
+Tvix’s modularity & composability allow recombining its parts in novel ways.
+It also provides library access to Nix data formats and concepts. In the
+long run, Tvix aims to produce a Nixpkgs-compatible alternative to NixCpp
+with respect to evaluating and building Nix expressions & systems.
+
+Tvix is still in its early stages of development; **you cannot yet use it as a
+Nix replacement**. However, if you are willing to roll up your sleeves and pipe
+together some existing functionality, it may already provide most of what is
+needed for your usecase! [Get in touch](./community.md) if you want to
+collaborate or contribute.
+
+Tvix is developed as a GPLv3-licensed free software project with
+source code available in the [TVL monorepo][].
+
+[Nix]: https://nixos.org
+[TVL]: https://tvl.fyi
+[TVL monorepo]: https://cs.tvl.fyi/depot/-/tree/tvix
+[pronunciation]: data:audio/mpeg;base64,SUQzBAAAAAAAI1RTU0UAAAAPAAADTGF2ZjU4Ljc2LjEwMAAAAAAAAAAAAAAA//NgxAAdEr31SGDGIQAoBRVe/7Yuv8SvC9C3dwMwOCEAE6YG/Cc/fQkSu5PE0Jzr6cRKhH6IEIIiIhOyU5Xdxbk5/0+vpvppXc/hQkrn7iCrn6e/XPield3Mq5oXh3cOLjgB3/+Zl4HAMuH/mAZpM8w8PDw9wAIk//5//gAODz/D1QNAKDLoeuUxZd2VeX0qV80WaVrSQech1Qpo//NixBofrA4YAVQoAYBzKPexmkFBrpGj2IrFxAh2NqwmjI9Bz+zGah1nVTsQQYlvF69rIzoTfLO6sqxMenRnYhhQe0yrQUNoTIrIymeh9/WtTnRjsk4wUEzE/10PE3ibzlRnQc6/o9BICCbzEVRN/E4CAAcgABBIgEtvMot7RXUwz1qnlmvx3zszWyTQ3PnVX6qRmWjvMuWn9CQGyP/zYsQrJuN+NYuYWAEASB1virzQzUvD2SBsJ598MYyYWUpZ6qA6mEkHbSSIDhm8/H9plcHHrYez50ll+Wj//uv7ZT1GfzU3fbbr+n7LPuVZHL9SPn+nww5M3d//3WdmV5O//9HYa2v9/MO7/iPY+tbPP3mqOf7M9YkkMSav2ub42nG43G5HIvvggKFUSYCII1JJlw5Z6Zl7q1gf4Yn/82LEHyfjCo29jHgCUQzqTIkkWtXFjjE6fsipzrFmZQMjI5ItTsH9c63TUWysQ5U0gKuGfMHW9xc1on58bw/fZTZkXamGN4tIM0bGPq0SlY8sJwn//v8/eNU1//j5zvGMb1u8e9/hz3nUPeb4v////r5+a1/9//97/3nesw9/3pKBi////wi4LtEAQQBKQdcrtttdriljiVQZDRBu//NgxA8jssa+X4xAAIyZIQuhQmsVXnSZQfa38AQAguDwKgVRGIoN7QuWGlkB4o01SRTOVz0IMe20lLfhYMST5BsMuNazInz4SXvROUIqeXKGlRF+tFu/ufB9OrGVnjnFYHB/ZL/3//yicvMVv8JzcxLo90MU0OFzaD9BIV4nxgAM8o54spq1vWJ3J1euBsqXgjzNIQwkDXFa+z0M//NixA8jY36IAZlAAE0Rhou7HpU1l3MchEFXAVQ+LzxKHghCoadED8LrAiMaa3xRZJAknkov/0FXWIS2baSv/8BUUcW+QUiR1evWF///Bt0o6RW5m5hYJUYsXF8p////0IR5Zrg8JXOOYVF8gZJHCk1zqTKx/Hf/NfEKJk//Q4OOvjk2lgr2KwVRuroANCW22220WC22WxxSJFJw///zYsQRIPpawl+PQAJIok7ku2H1FcavOpWYUpCxe5s1Q+UMGBAYIxQMDgDA7pVwZaCAHim/+9y90RRhX/+fFaUgdCMJty+f/76mfduCSBt2zTfz///0ic173oKSszA1maHKny9l0yXDjzAfxax9QnSpfGXFwuopIpFDapMcuhPXAR3d0afa5G6KRfINNDCox6CHM0gRpAHhcjpg5S//82LEHSehUqZ/2MAAstHIU01fRJTCUN0W5ONikDty+mkOcMdZggMiyaKrk5FopDHCDIREY5AUvBrQy6mEtXwj5KF2NnjqmcNPxI1hFYIw4cDPo7jsORGJyxhvu7VJ9JSU9PfHgh+s+kasgvIDmlycQOTKARwslggLg44MRq0e9zQPs94laXjb3mzJ4UOxRZAAVyUswcgDasxCuqXL//NgxA4fmbakXspMYGEihpTRnTNbuO2ScS7G8HnUYI0VUba51KySxTFPhAGIBWhZaUlpDwIKEUEFhRRIzMs0un31sV8pgYkDIo/////NZ7kCXBR5z/SmcfCtV7CBYROck2wt1PNAIWBpH20Ch06Mi7nAk6+0XTcL0j1jHvMKBjRANkjutdRwilbDIuehzRxbsWWi8O2LtKEAnFGg//NixB4Y2VKpHsDEnBAxM9VjUzUTxwpSxo3AKtq31VWBlKwUfHv8MKcPFQF6xg8NDlvb/jYab0ka3Q02RWr6nnoat/kxFMsfSd9N1V70VMQEQlUAAWVIF9yCKxm5BjQQyFCjvEAhZkFiwpgEl+ZTMQ9VBAo8lZIkcRIsmUt2JOCoEsO02W3zMnWjMttVnOJbLY1fZOAgf61FKRAJy//zYsRKHYIKclTJhJz/EmyjLKhmM5jG/m/MKFyxPj5WLwlshMRM9RJbjSEpKJ2XajtyqPrsVoMplgElrTccgAtkE8gNDDUgCYAXcJ+KZwji3XvujuyuqqkxkbfGZ1DKtgZj2KNQokgJo0LNS43+WVCkYca9L7GVPh3PpOFRqX6l/5/3Rur//7Lw/2/gXzNXUmb/8+r+cqrG1/jHV1L/82LEZB9r2lmdUxgBY12NW/8yra/VWCY2onY2OqTCqWoDlFYuNJIHSADU0zPoXEANHC/CJQ9VXi2VkPWYu/o3ey675hhm83g4Gol1zFaZnNPdWtnvpzz3pm23+qfMRrP3/6/r/X0+hh85+nmX/Xfm9z9jxwgQSQmIOEPp/VLtr+7q30XaQOBi7SH6gvr/6+/759gUGYNgwPy9etHb//NgxHYY48Zc8484AJ+xN8+cpj5c1N6jT6efgxULyUJDRo8HjxIoqkgnU6rJKkyo1MmEVdlbq8/tio2Xzz+92+/hh7Y2PfDnnZb////tivb//ppv///3GtRNroKtdtiYhOyKSA54dcWIJUKiA8CqGhH/EQqaTS22Kh0VuREVT2/1jkddqAETOd02plawNh7D0gUlIdr7skBZn36Z//NixKEgqp6NlYxYAILyK/Me3NLnji/AGNiP/ppZtSKjlJ0l2MObV3mZmlKRXWLGCQYQn9E46Jzv2Dzpxf/ysWLHKsCQYdizplszP2DBZSnMOXSKJbJb9FksHm6vsrKhodn6+PayULvD6VXqeH9uhIKMilwo0yP2CBnvLSy5ndd/2V52KZe9tOpbuyd50yIzNGgAOzgxhghLnrj73v/zYsSuHxo60l/PYAIQvV0+Y8c5Eve9aCLA7KyvyIpHHTD8kJXBY/XQ6tJix57v/z7aytZXaxOQCgTtIiBnYRmvAn2ZvJfF28zb+Z/Wrwcqb/5/Lgd8RmsQIfWbvKmJZVsISTlOJrSMeiTp8eUdLeIww1K0tPobChnijzJqdpCJGzAMMm+137+u1ZW2uNLKYbv3Gp79FOb9/28bG1f/82LEwR+bMspeEZOsn/dzWfGbF0+9GL7VV/TnVRMHHkLI1ESlvkmsXHOKdIxhqJB5JP0v3//d3ec/7ZetjPpTRMCX/o6Cn9pNqMqqp20AIAUhbGQ2DwQQQajwGmTvpPPSJnvJq0tNMO2ctFOry7oUse5nGs7ig8jCIYQ3qdHYjFUUgVysGNUxCFFnQIsEwuheniLPf/0BXDnTKQBN//NgxNIfOz7PHjBM8CgRwalCwMUpnK7E5qBQEbDK2TUjYjWH2k3dej3zKqqtTHLqs9vnyUABQryVhSrrkyQmUKEyNw88ZKsHE3objjf+74httv
+/7btZmU0T2JS6eKit8fLYwttZwfiZViggsacmKkdh5ktgIc8eqhp1WK2SNdYROKUApEUkD5b///7JlyI0S////2igt0ZUAoGR//NixOQd0rLHHlmEPAgysKqZnSLShsPjwV5YYAMPciIi7Ci6nWZJTz0MYSRsSxCiSQ4VJOUHQ7EQ9wkfzy/kPTrqVTK7NYDABwwlZGkyjLS80AYda6qhOy6TSym7epLitq8UJx4QIyIy1Hp4tNC4hSzspu9rPQ/P/9ZVIn0qoAIfdBUESW1UKGLSz4/pEzRC1EQ6kweN/JKHRUpUe//zYsT8KGruolJL04QXyh0uNArGozLD+BhSxsBVGq6QCMKqsGYMBMrUgqki2HXQme1PcskWvW74a8ra7QoAigIobgqjopUJPqbNBY8/DxS92FTwxupQx5wodcqHbcTeCAXqmqkinH6iNJHaKmXLGKEJhKncFgtEGSwkgSSOXMhMm5/3nEpmStNNRqu3p21/LaxJL1RjKUv/Khuu8yP/82DE6iJa2pZMSUfE0e5SmVHKxvSa6AMK0cpeIjjGM6sxUOVjKIh0ylZSpV8xqCQc3A1lRkqr88y7rO1aKlwMtBNoJmhGGCDxgh6UJfVM2PXVKmLM5lkah6mdmczj0nZS6sSjUtgKHoakUZGigFACs7Sl4SZKW8wDBTbOU6IMuq/75Tvn5yU53/OJJUcSx5kjM+fsYU+Z//USxW//82LE7yQTFmmCwYswDAQ86RpBWGiqw1Dp3DqwVATjoifDskVQVcoKB0NHutyw0Gmekq5p7JVuUeqqTEFNRaqqUJagECihZAmgmpE40ouLi2v/mRkfRrLJUcZlDBQoYGCDo5GyhgcdDIjWWWW//////mrKGBgg7oZMoYGiORqGCggYMFUcjL/7LmasoYGCDhI6GTLLZZZZb5GrWSx0//NixO4ikg4wCtGFFDJlWVDJlYMFBAwjoZFLP/8mVrLY5GrWSo5MrUKCBwxKtUxBTUUzLjEwMFVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVf/zYsTtH1vZLBQwRq1VVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVU=
diff --git a/tvix/docs/src/language-spec.md b/tvix/docs/src/language-spec.md
index 0ff1dc491e3c..b3908b2cf48a 100644
--- a/tvix/docs/src/language-spec.md
+++ b/tvix/docs/src/language-spec.md
@@ -1,8 +1,10 @@
 # Specification of the Nix Language
 
-WARNING: This document is a work in progress. Please keep an eye on
+```admonish attention
+This document is a work in progress. Please keep an eye on
 [`topic:nix-spec`](https://cl.tvl.fyi/q/topic:nix-spec) for ongoing
 CLs.
+```
 
 Nix is a general-purpose, functional programming language which this
 document aims to describe.
diff --git a/tvix/docs/src/nix-daemon/changelog.md b/tvix/docs/src/nix-daemon/changelog.md
index bc99dc6af00d..41c168374c50 100644
--- a/tvix/docs/src/nix-daemon/changelog.md
+++ b/tvix/docs/src/nix-daemon/changelog.md
@@ -34,7 +34,7 @@ So this means that any version of Nix 2.x can't talk to Nix 0.x.
 | --------------- | -- | -------------- | -------- | ----------- | ----- |
 | *Quit           | 0  | [a711689368][a711689368] || 0.11 | Became dead code in [7951c3c54][7951c3c54] (Nix 0.11) and removed in [d3c61d83b][d3c61d83b] (Nix 1.8) |
 | IsValidPath     | 1  | [a711689368][a711689368] || 0.11 ||
-| HasSubstitutes  | 3  | [0565b5f2b3][0565b5f2b3] || 0.11 ||
+| HasSubstitutes  | 3  | [0565b5f2b3][0565b5f2b3] || 0.11 | Obsolete [09a6321aeb][09a6321aeb]<br>Nix 1.2 Protocol 1.12 |
 | QueryPathHash   | 4  | [0565b5f2b3][0565b5f2b3] || 0.11 | Obsolete [e0204f8d46][e0204f8d46]<br>Nix 2.0 Protocol 1.16 |
 | QueryReferences | 5  | [0565b5f2b3][0565b5f2b3] || 0.11 | Obsolete [e0204f8d46][e0204f8d46]<br>Nix 2.0 Protocol 1.16 |
 | QueryReferrers  | 6  | [0565b5f2b3][0565b5f2b3] || 0.11 ||
diff --git a/tvix/docs/src/nix-daemon/handshake.md b/tvix/docs/src/nix-daemon/handshake.md
new file mode 100644
index 000000000000..0a436372b3ff
--- /dev/null
+++ b/tvix/docs/src/nix-daemon/handshake.md
@@ -0,0 +1,32 @@
+# Handshake
+## client -> server
+- 0x6e697863 :: [Int](#int) (hardcoded, 'nixc' in ASCII)
+
+## server -> client
+- 0x6478696f :: [Int](#int) (hardcoded, 'dxio' in ASCII)
+- protocolVersion :: [Int](#int)
+
+## client -> server
+- clientVersion :: [Int](#int)
+
+### If clientVersion is 1.14 or later
+- sendCpu :: [Bool](#bool) (hardcoded to false in client)
+#### If sendCpu is true
+- cpuAffinity :: [Int](#int) (obsolete and ignored)
+
+### If clientVersion is 1.11 or later
+- reserveSpace :: [Bool](#bool) (obsolete, ignored and set to false)
+
+
+## server -> client
+
+### If clientVersion is 1.33 or later
+- nixVersion :: String
+
+### If clientVersion is 1.35 or later
+- trusted :: OptTrusted
+
+## server -> client
+- send logs
+- [operation](./operations.md) :: Int
\ No newline at end of file
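+
+To make the sequencing concrete, here is a rough sketch of the client side
+of this exchange, assuming the little-endian framing described in
+[serialization](./serialization.md); the helper names are illustrative, not
+the actual nix-compat API.
+
+```rust
+use std::io::{self, Read, Write};
+
+const WORKER_MAGIC_1: u64 = 0x6e697863; // 'nixc'
+const WORKER_MAGIC_2: u64 = 0x6478696f; // 'dxio'
+
+fn read_u64(r: &mut impl Read) -> io::Result<u64> {
+    let mut buf = [0u8; 8];
+    r.read_exact(&mut buf)?;
+    Ok(u64::from_le_bytes(buf))
+}
+
+fn write_u64(w: &mut impl Write, v: u64) -> io::Result<()> {
+    w.write_all(&v.to_le_bytes())
+}
+
+/// Client side of the handshake, up to (but not including) the
+/// post-handshake server messages. Returns the server's version.
+/// Assumes our own version is at least 1.14.
+fn client_handshake<C: Read + Write>(conn: &mut C, client_version: u64) -> io::Result<u64> {
+    write_u64(conn, WORKER_MAGIC_1)?;
+    if read_u64(conn)? != WORKER_MAGIC_2 {
+        return Err(io::Error::new(io::ErrorKind::InvalidData, "bad server magic"));
+    }
+    let server_version = read_u64(conn)?;
+    write_u64(conn, client_version)?;
+    write_u64(conn, 0)?; // sendCpu, hardcoded to false (clientVersion >= 1.14)
+    write_u64(conn, 0)?; // reserveSpace, obsolete and false (clientVersion >= 1.11)
+    Ok(server_version)
+}
+```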
diff --git a/tvix/docs/src/nix-daemon/index.md b/tvix/docs/src/nix-daemon/index.md
new file mode 100644
index 000000000000..e47c20151e0d
--- /dev/null
+++ b/tvix/docs/src/nix-daemon/index.md
@@ -0,0 +1,15 @@
+# Nix Daemon Protocol
+
+The Nix Daemon protocol is what's used to communicate with the `nix-daemon`,
+either on the local system (in which case the communication happens via a Unix
+domain socket), or with a remote Nix (in which this is tunneled over SSH).
+
+It uses a custom binary format which isn't well documented. The subpages
+here serve as an in-depth reference for some of the inner workings, data
+types, etc.
+
+A first implementation of this exists in
+[griff/Nix.rs](https://github.com/griff/Nix.rs/tree/main).
+
+Work is underway to port / factor this out into reusable building blocks
+in the [nix-compat] crate.
diff --git a/tvix/docs/src/nix-daemon/logging.md b/tvix/docs/src/nix-daemon/logging.md
index 70fe0882fa6d..c2828b13c21a 100644
--- a/tvix/docs/src/nix-daemon/logging.md
+++ b/tvix/docs/src/nix-daemon/logging.md
@@ -13,20 +13,21 @@ While not in this state between request reading and response sending all
 messages and activities are buffered until next time the logger can send data.
 
 The logging messages supported are:
-- `STDERR_LAST`
-- `STDERR_ERROR`
-- `STDERR_NEXT`
-- `STDERR_READ`
-- `STDERR_WRITE`
-- `STDERR_START_ACTIVITY`
-- `STDERR_STOP_ACTIVITY`
-- `STDERR_RESULT`
+- [`STDERR_LAST`](#stderr_last)
+- [`STDERR_ERROR`](#stderr_error)
+- [`STDERR_NEXT`](#stderr_next)
+- [`STDERR_READ`](#stderr_read)
+- [`STDERR_WRITE`](#stderr_write)
+- [`STDERR_START_ACTIVITY`](#stderr_start_activity)
+- [`STDERR_STOP_ACTIVITY`](#stderr_stop_activity)
+- [`STDERR_RESULT`](#stderr_result)
 
 
 ### `STDERR_LAST`
 Marks the end of the logs, normal processing can resume.
 
-- 0x616c7473 :: [UInt64][se-UInt64] (hardcoded)
+- 0x616c7473 :: [UInt64][se-UInt64] (hardcoded, 'alts' in ASCII)
+
 
 ### `STDERR_ERROR`
 This also marks the end of this log "session" and so it
@@ -34,20 +35,20 @@ has the same effect as `STDERR_LAST`.
 On the client the error is thrown as an exception and no response is read.
 
 #### If protocol version is 1.26 or newer
-- 0x63787470 :: [UInt64][se-UInt64] (hardcoded)
+- 0x63787470 :: [UInt64][se-UInt64] (hardcoded, 'cxtp' in ASCII)
 - error :: [Error][se-Error]
 
 #### If protocol version is older than 1.26
-- 0x63787470 :: [UInt64][se-UInt64] (hardcoded)
-- msg :: [String][se-String]
+- 0x63787470 :: [UInt64][se-UInt64] (hardcoded, 'cxtp' in ASCII)
+- msg :: [String][se-String] (If logger is JSON, invalid UTF-8 is replaced with U+FFFD)
 - exitStatus :: [Int][se-Int]
 
 
 ### `STDERR_NEXT`
 Normal string log message.
 
-- 0x6f6c6d67 :: [UInt64][se-UInt64] (hardcoded)
-- msg :: [String][se-String]
+- 0x6f6c6d67 :: [UInt64][se-UInt64] (hardcoded, 'olmg' in ASCII)
+- msg :: [String][se-String] (If logger is JSON, invalid UTF-8 is replaced with U+FFFD)
 
 
 ### `STDERR_READ`
@@ -56,15 +57,17 @@ It works by sending a desired buffer length and then on the receiver stream it
 reads bytes buffer of that length. If it receives 0 bytes it sees this as an
 unexpected EOF.
 
-- 0x64617461 :: [UInt64][se-UInt64] (hardcoded)
+- 0x64617461 :: [UInt64][se-UInt64] (hardcoded, 'data' in ASCII)
 - desiredLen :: [Size][se-Size]
 
+
 ### `STDERR_WRITE`
 Writer interface used by ExportPath. Simply writes a buffer.
 
 - 0x64617416 :: [UInt64][se-UInt64] (hardcoded)
 - buffer :: [Bytes][se-Bytes]
 
+
 ### `STDERR_START_ACTIVITY`
 Begins an activity. In other tracing frameworks this would be called a span.
 
@@ -73,11 +76,11 @@ versions of the protocol instead of sending an `STDERR_START_ACTIVITY`
 the level is checked against enabled logging level and the text field is
 sent as a simple log message with `STDERR_NEXT`.
 
-- 0x53545254 :: [UInt64][se-UInt64] (hardcoded)
+- 0x53545254 :: [UInt64][se-UInt64] (hardcoded, 'STRT' in ASCII)
 - act :: [UInt64][se-UInt64]
 - level :: [Verbosity][se-Verbosity]
 - type :: [ActivityType][se-ActivityType]
-- text :: [String][se-String]
+- text :: [String][se-String] (If logger is JSON, invalid UTF-8 is replaced with U+FFFD)
 - fields :: [List][se-List] of [Field][se-Field]
 - parent :: [UInt64][se-UInt64]
 
@@ -92,7 +95,7 @@ Just sends `ActivityId`.
 Implemented in protocol 1.20. When backwards compatible with older versions of
 the protocol and this message would have been sent it is instead ignored.
 
-- 0x53544f50 :: [UInt64][se-UInt64] (hardcoded)
+- 0x53544f50 :: [UInt64][se-UInt64] (hardcoded, 'STOP' in ASCII)
 
 
 ### `STDERR_RESULT`
@@ -101,14 +104,13 @@ Sends results for a given activity.
 Implemented in protocol 1.20. When backwards compatible with older versions of
 the protocol and this message would have been sent it is instead ignored.
 
-- 0x52534c54 :: [UInt64][se-UInt64] (hardcoded)
+- 0x52534c54 :: [UInt64][se-UInt64] (hardcoded, 'RSLT' in ASCII)
 - act :: [UInt64][se-UInt64]
 - type :: [ResultType][se-ResultType]
 - fields :: [List][se-List] of [Field][se-Field]
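+
+Tying these together, a client drains messages in a loop until
+`STDERR_LAST`. A rough sketch (the `Wire` connection type and the error
+plumbing are assumed here, not actual nix-compat types):
+
+```rust
+const STDERR_LAST: u64 = 0x616c7473; // 'alts'
+const STDERR_ERROR: u64 = 0x63787470; // 'cxtp'
+const STDERR_NEXT: u64 = 0x6f6c6d67; // 'olmg'
+
+/// Process log messages after a request until STDERR_LAST (or an error).
+fn process_logs(conn: &mut Wire) -> Result<(), DaemonError> {
+    loop {
+        match conn.read_u64()? {
+            STDERR_LAST => return Ok(()), // normal processing resumes
+            STDERR_ERROR => return Err(conn.read_error()?),
+            STDERR_NEXT => eprintln!("{}", conn.read_string()?),
+            // STDERR_READ/STDERR_WRITE and the activity/result messages
+            // would be handled analogously and are elided here.
+            op => return Err(DaemonError::UnknownMessage(op)),
+        }
+    }
+}
+```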
 
 
 
-
 [se-UInt64]: ./serialization.md#uint64
 [se-Int]: ./serialization.md#int
 [se-Size]: ./serialization.md#size
diff --git a/tvix/docs/src/nix-daemon/operations.md b/tvix/docs/src/nix-daemon/operations.md
index 0683ab070973..80708c9104b5 100644
--- a/tvix/docs/src/nix-daemon/operations.md
+++ b/tvix/docs/src/nix-daemon/operations.md
@@ -14,11 +14,9 @@
 | [FindRoots](#findroots)                                     | 14 |
 | [SetOptions](#setoptions)                                   | 19 |
 | [CollectGarbage](#collectgarbage)                           | 20 |
-| [QuerySubstitutablePathInfo](#querysubstitutablepathinfo)   | 21 |
 | [QueryAllValidPaths](#queryallvalidpaths)                   | 23 |
 | [QueryPathInfo](#querypathinfo)                             | 26 |
 | [QueryPathFromHashPart](#querypathfromhashpart)             | 29 |
-| [QuerySubstitutablePathInfos](#querysubstitutablepathinfos) | 30 |
 | [QueryValidPaths](#queryvalidpaths)                         | 31 |
 | [QuerySubstitutablePaths](#querysubstitutablepaths)         | 32 |
 | [QueryValidDerivers](#queryvalidderivers)                   | 33 |
@@ -40,17 +38,19 @@
 
 ## Obsolete operations
 
-| Operation                                                 | Id |
-| --------------------------------------------------------- | -- |
-| [QueryPathHash](#querypathhash)                           | 4  |
-| [QueryReferences](#queryreferences)                       | 5  |
-| [AddTextToStore](#addtexttostore)                         | 8  |
-| [SyncWithGC](#syncwithgc)                                 | 13 |
-| [ExportPath](#exportpath)                                 | 16 |
-| [QueryDeriver](#queryderiver)                             | 18 |
-| [QueryDerivationOutputs](#queryderivationoutputs)         | 22 |
-| [ImportPaths](#importpaths)                               | 27 |
-| [QueryDerivationOutputNames](#queryderivationoutputnames) | 28 |
+| Operation                                                   | Id |
+| ----------------------------------------------------------- | -- |
+| [QueryPathHash](#querypathhash)                             | 4  |
+| [QueryReferences](#queryreferences)                         | 5  |
+| [AddTextToStore](#addtexttostore)                           | 8  |
+| [SyncWithGC](#syncwithgc)                                   | 13 |
+| [ExportPath](#exportpath)                                   | 16 |
+| [QueryDeriver](#queryderiver)                               | 18 |
+| [QuerySubstitutablePathInfo](#querysubstitutablepathinfo)   | 21 |
+| [QueryDerivationOutputs](#queryderivationoutputs)           | 22 |
+| [ImportPaths](#importpaths)                                 | 27 |
+| [QueryDerivationOutputNames](#queryderivationoutputnames)   | 28 |
+| [QuerySubstitutablePathInfos](#querysubstitutablepathinfos) | 30 |
 
 
 ## Removed operations
@@ -81,6 +81,7 @@ As the name says checks that a store path is valid i.e. in the store.
 
 This is a pretty core operation used everywhere.
 
+
 ### Inputs
 path :: [StorePath][se-StorePath]
 
@@ -92,6 +93,9 @@ isValid :: [Bool][se-Bool]
 
 **Id:** 3<br>
 **Introduced:** Nix 0.11<br>
+**Obsolete:** Protocol 1.12, Nix 1.2<br>
+
+Replaced by [QuerySubstitutablePaths](#querysubstitutablepaths).
 
 Checks if we can substitute the input path from a substituter. Uses
 QuerySubstitutablePaths under the hood :/
@@ -115,7 +119,7 @@ Retrieves the base16 NAR hash of a given store path.
 path :: [StorePath][se-StorePath]
 
 ### Outputs
-hash :: [String][se-String] (base16-encoded NAR hash without algorithm prefix)
+hash :: [NARHash][se-NARHash]
 
 
 ## QueryReferences
@@ -130,7 +134,7 @@ Retrieves the references of a given path
 path :: [StorePath][se-StorePath]
 
 ### Outputs
-references :: [List][se-List] of [StorePath][se-StorePath]
+references :: [Set][se-Set] of [StorePath][se-StorePath]
 
 
 ## QueryReferrers
@@ -144,7 +148,7 @@ Retrieves the referrers of a given path.
 path :: [StorePath][se-StorePath]
 
 ### Outputs
-referrers :: [List][se-List] of [StorePath][se-StorePath]
+referrers :: [Set][se-Set] of [StorePath][se-StorePath]
 
 
 ## AddToStore
@@ -156,16 +160,16 @@ Add a new path to the store.
 
 ### Before protocol version 1.25
 #### Inputs
-- baseName :: [String][se-String]
+- baseName :: [StorePathName][se-StorePathName]
 - fixed :: [Bool64][se-Bool64]
 - recursive :: [FileIngestionMethod][se-FileIngestionMethod]
-- hashAlgo :: [String][se-String]
+- hashAlgo :: [HashAlgorithm][se-HashAlgorithm]
 - NAR dump
 
 If fixed is `true`, hashAlgo is forced to `sha256` and recursive is forced to
-`Recursive`.
+`NixArchive`.
 
-Only `Flat` and `Recursive` values are supported for the recursive input
+Only `Flat` and `NixArchive` values are supported for the recursive input
 parameter.
 
 #### Outputs
@@ -173,9 +177,9 @@ path :: [StorePath][se-StorePath]
 
 ### Protocol version 1.25 or newer
 #### Inputs
-- name :: [String][se-String]
+- name :: [StorePathName][se-StorePathName]
 - camStr :: [ContentAddressMethodWithAlgo][se-ContentAddressMethodWithAlgo]
-- refs :: [List][se-List] of [StorePath][se-StorePath]
+- refs :: [Set][se-Set] of [StorePath][se-StorePath]
 - repairBool :: [Bool64][se-Bool64]
 - [Framed][se-Framed] NAR dump
 
@@ -197,9 +201,9 @@ to [AddToStore](#addtostore). And so this corresponds to calling
 wrapped as a NAR.
 
 ### Inputs
-- suffix :: [String][se-String]
+- suffix :: [StorePathName][se-StorePathName]
 - text :: [Bytes][se-Bytes]
-- refs :: [List][se-List] of [StorePath][se-StorePath]
+- refs :: [Set][se-Set] of [StorePath][se-StorePath]
 
 ### Outputs
 path :: [StorePath][se-StorePath]
@@ -213,15 +217,15 @@ path :: [StorePath][se-StorePath]
 Build (or substitute) a list of derivations.
 
 ### Inputs
-paths :: [List][se-List] of [DerivedPath][se-DerivedPath]
+paths :: [Set][se-Set] of [DerivedPath][se-DerivedPath]
 
 #### Protocol 1.15 or newer
-mode :: [BuildMode][se-BuildMode]
+mode :: [BuildMode][se-BuildMode] (defaults to Normal)
 
 Check that connection is trusted before allowing Repair mode.
 
 ### Outputs
-1 :: [Int][se-Int] (hardcoded)
+1 :: [Int][se-Int] (hardcoded and ignored by client)
 
 
 ## EnsurePath
@@ -235,7 +239,7 @@ Checks if a path is valid. Note: it may be made valid by running a substitute.
 path :: [StorePath][se-StorePath]
 
 ### Outputs
-1 :: [Int][se-Int] (hardcoded)
+1 :: [Int][se-Int] (hardcoded and ignored by client)
 
 
 ## AddTempRoot
@@ -253,7 +257,7 @@ deleting store paths that the client is actively doing something with.
 path :: [StorePath][se-StorePath]
 
 ### Outputs
-1 :: [Int][se-Int] (hardcoded)
+1 :: [Int][se-Int] (hardcoded and ignored by client)
 
 
 ## AddIndirectRoot
@@ -267,10 +271,10 @@ created by [AddPermRoot](#addpermroot).
 Only ever sent on the local unix socket nix daemon.
 
 ### Inputs
-path :: [String][se-String]
+path :: [Path][se-Path]
 
 ### Outputs
-1 :: [Int][se-Int] (hardcoded)
+1 :: [Int][se-Int] (hardcoded and ignored by client)
 
 
 ## SyncWithGC
@@ -308,7 +312,7 @@ thing.
 Find the GC roots.
 
 ### Outputs
-roots :: [Map][se-Map] of [String][se-String] to [StorePath][se-StorePath]
+roots :: [Map][se-Map] of [Path][se-Path] to [StorePath][se-StorePath]
 
 The key is the link pointing to the given store path.
 
@@ -333,7 +337,8 @@ Export a store path in the binary format nix-store --import expects. See impleme
 - sign :: [Int][se-Int] (ignored and hardcoded to 0 in client)
 
 ### Outputs
-Uses `STDERR_WRITE` to send dump in export format
+Uses [`STDERR_WRITE`](./logging.md#stderr_write) to send dump in
+[export format][se-ExportFormat]
 
 After the dump it outputs the following.
 
@@ -373,18 +378,18 @@ Only ever used right after the handshake.
 
 ### Inputs
 
-- keepFailed :: [Int][se-Int]
-- keepGoing :: [Int][se-Int]
-- tryFallback :: [Int][se-Int]
+- keepFailed :: [Bool][se-Bool]
+- keepGoing :: [Bool][se-Bool]
+- tryFallback :: [Bool][se-Bool]
 - verbosity :: [Verbosity][se-Verbosity]
-- maxbuildJobs :: [Int][se-Int]
-- maxSilentTime :: [Int][se-Int]
+- maxBuildJobs :: [Int][se-Int]
+- maxSilentTime :: [Time][se-Time]
 - useBuildHook :: [Bool][se-Bool] (ignored and hardcoded to true in client)
 - verboseBuild :: [Verbosity][se-Verbosity]
 - logType :: [Int][se-Int] (ignored and hardcoded to 0 in client)
 - printBuildTrace :: [Int][se-Int] (ignored and hardcoded to 0 in client)
 - buildCores :: [Int][se-Int]
-- useSubstitutes :: [Int][se-Int]
+- useSubstitutes :: [Bool][se-Bool]
 
 ### Protocol 1.12 or newer
 otherSettings :: [Map][se-Map] of [String][se-String] to [String][se-String]
@@ -399,7 +404,7 @@ Find the GC roots.
 
 ### Inputs
 - action :: [GCAction][se-GCAction]
-- pathsToDelete :: [List][se-List] of [StorePath][se-StorePath]
+- pathsToDelete :: [Set][se-Set] of [StorePath][se-StorePath]
 - ignoreLiveness :: [Bool64][se-Bool64]
 - maxFreed :: [UInt64][se-UInt64]
 - removed :: [Int][se-Int] (ignored and hardcoded to 0 in client)
@@ -407,18 +412,19 @@ Find the GC roots.
 - removed :: [Int][se-Int] (ignored and hardcoded to 0 in client)
 
 ### Outputs
-- pathsDeleted :: [List][se-List] of [String][se-String]
+- pathsDeleted :: [Set][se-Set] of [Path][se-Path]
 - bytesFreed :: [UInt64][se-UInt64]
-- 0 :: [UInt64][se-UInt64] (hardcoded)
+- 0 :: [UInt64][se-UInt64] (hardcoded, obsolete and ignored by client)
 
-Depending on the action pathsDeleted is, the GC roots, or the paths that would
-be or have been deleted.
+Depending on the value of the action input, pathsDeleted is either the GC
+roots, or the paths that would be (or have been) deleted.
 
 
 ## QuerySubstitutablePathInfo
 
 **Id:** 21<br>
 **Introduced:** Protocol 1.02, Nix 0.12<br>
+**Obsolete:** Protocol 1.12, Nix 1.2<br>
 
 Retrieves the various substitutable paths infos for a given path.
 
@@ -429,10 +435,7 @@ path :: [StorePath][se-StorePath]
 found :: [Bool][se-Bool]
 
 #### If found is true
-- deriver :: [OptStorePath][se-OptStorePath]
-- references :: [List][se-List] of [StorePath][se-StorePath]
-- downloadSize :: [UInt64][se-UInt64]
-- narSize :: [UInt64][se-UInt64]
+- info :: [SubstitutablePathInfo][se-SubstitutablePathInfo]
 
 
 ## QueryDerivationOutputs
@@ -447,7 +450,7 @@ Retrieves all the outputs paths of a given derivation.
 path :: [StorePath][se-StorePath] (must point to a derivation)
 
 ### Outputs
-derivationOutputs :: [List][se-List] of [StorePath][se-StorePath]
+derivationOutputs :: [Set][se-Set] of [StorePath][se-StorePath]
 
 
 ## QueryAllValidPaths
@@ -458,7 +461,7 @@ derivationOutputs :: [List][se-List] of [StorePath][se-StorePath]
 Retrieves all the valid paths contained in the store.
 
 ### Outputs
-paths :: [List][se-List] of [StorePath][se-StorePath]
+paths :: [Set][se-Set] of [StorePath][se-StorePath]
 
 
 ## QueryFailedPaths (removed)
@@ -498,7 +501,7 @@ success :: [Bool64][se-Bool64]
 pathInfo :: [UnkeyedValidPathInfo][se-UnkeyedValidPathInfo]
 
 #### If protocol version is older than 1.17
-If info not found return error with `STDERR_ERROR`
+If info not found return error with [`STDERR_ERROR`](./logging.md#stderr_error)
 
 pathInfo :: [UnkeyedValidPathInfo][se-UnkeyedValidPathInfo]
 
@@ -516,7 +519,7 @@ before the metadata about the store path and so you would typically have
 to store the NAR in memory or temporarily on disk before processing it.
 
 ### Inputs
-List of NAR dumps coming from the ExportPaths operations.
+[List of NAR dumps][se-ImportPaths] coming from one or more ExportPath operations.
 
 ### Outputs
 importedPaths :: [List][se-List] of [StorePath][se-StorePath]
@@ -534,7 +537,7 @@ Retrieves the name of the outputs of a given derivation. EG. out, dev, etc.
 path :: [StorePath][se-StorePath] (must be a derivation path)
 
 ### Outputs
-names :: [List][se-List] of [String][se-String]
+names :: [Set][se-Set] of [OutputName][se-OutputName]
 
 
 ## QueryPathFromHashPart
@@ -542,11 +545,10 @@ names :: [List][se-List] of [String][se-String]
 **Id:** 29<br>
 **Introduced:** Protocol 1.11, Nix 1.1<br>
 
-Retrieves a store path from a base16 (input) hash. Returns "" if no path was
-found.
+Retrieves a store path from a nixbase32 (input) hash.
 
 ### Inputs
-hashPart :: [String][se-String]  (must be a base-16 hash)
+hashPart :: [StorePathHash][se-StorePathHash]
 
 ### Outputs
 path :: [OptStorePath][se-OptStorePath]
@@ -556,18 +558,22 @@ path :: [OptStorePath][se-OptStorePath]
 
 **Id:** 30<br>
 **Introduced:** Protocol 1.12*, Nix 1.2<br>
+**Obsolete:** Protocol 1.19*, Nix 2.0<br>
 
 Retrieves the various substitutable paths infos for set of store paths.
 
+Only ever used as a fallback for QueryMissing, which means that if the
+protocol is 1.19 or later it is never sent and is therefore obsolete.
+
 ### Inputs
 #### If protocol version is 1.22 or newer
 paths :: [Map][se-Map] of [StorePath][se-StorePath] to [OptContentAddress][se-OptContentAddress] 
 
 #### If protocol version older than 1.22
-paths :: [List][se-List] of [StorePath][se-StorePath]
+paths :: [Set][se-Set] of [StorePath][se-StorePath]
 
 ### Outputs
-infos :: [List][se-List] of [SubstitutablePathInfo][se-SubstitutablePathInfo]
+infos :: [Map][se-Map] of [StorePath][se-StorePath] to [SubstitutablePathInfo][se-SubstitutablePathInfo]
 
 
 ## QueryValidPaths
@@ -578,13 +584,13 @@ infos :: [List][se-List] of [SubstitutablePathInfo][se-SubstitutablePathInfo]
 Takes a list of store paths and returns a new list only containing the valid store paths
 
 ## Inputs
-paths :: [List][se-List] of [StorePath][se-StorePath]
+paths :: [Set][se-Set] of [StorePath][se-StorePath]
 
 ### If protocol version is 1.27 or newer
 substitute :: [Bool][se-Bool] (defaults to false if not sent)
 
 ## Outputs
-paths :: [List][se-List] of [StorePath][se-StorePath]
+paths :: [Set][se-Set] of [StorePath][se-StorePath]
 
 
 ## QuerySubstitutablePaths
@@ -599,10 +605,10 @@ In versions of the protocol prior to 1.12 [HasSubstitutes](#hassubstitutes)
 is used to implement the functionality that this operation provides.
 
 ### Inputs
-paths :: [List][se-List] of [StorePath][se-StorePath]
+paths :: [Set][se-Set] of [StorePath][se-StorePath]
 
 ### Outputs
-paths :: [List][se-List] of [StorePath][se-StorePath]
+paths :: [Set][se-Set] of [StorePath][se-StorePath]
 
 
 ## QueryValidDerivers
@@ -616,7 +622,7 @@ Retrieves the derivers of a given path.
 path :: [StorePath][se-StorePath]
 
 ### Outputs
-derivers :: [List][se-List] of [StorePath][se-StorePath]
+derivers :: [Set][se-Set] of [StorePath][se-StorePath]
 
 
 ## OptimiseStore
@@ -627,7 +633,7 @@ derivers :: [List][se-List] of [StorePath][se-StorePath]
 Optimise store by hardlinking files with the same content.
 
 ### Outputs
-1 :: [Int][se-Int] (hardcoded)
+1 :: [Int][se-Int] (hardcoded and ignored by client)
 
 
 ## VerifyStore
@@ -677,14 +683,14 @@ buildResult :: [BuildResult][se-BuildResult]
 **Id:** 37<br>
 **Introduced:** Protocol 1.16, Nix 2.0<br>
 
-Add the signatures associated to a given path.
+Add the signatures associated to a given path. Used by `nix store copy-sigs` and `nix store sign`.
 
 ### Inputs
 - path :: [StorePath][se-StorePath]
-- signatures :: [List][se-List] of [String][se-String]
+- signatures :: [Set][se-Set] of [Signature][se-Signature]
 
 ### Outputs
-1 :: [Int][se-Int] (hardcoded)
+1 :: [Int][se-Int] (hardcoded and ignored by client)
 
 
 ## NarFromPath
@@ -717,12 +723,12 @@ Dumps a path as a NAR
 ### Inputs
 - path :: [StorePath][se-StorePath]
 - deriver :: [OptStorePath][se-OptStorePath]
-- narHash :: [String][se-String] SHA256 NAR hash base 16
-- references :: [List][se-List] of [StorePath][se-StorePath]
+- narHash :: [NARHash][se-NARHash]
+- references :: [Set][se-Set] of [StorePath][se-StorePath]
 - registrationTime :: [Time][se-Time]
 - narSize :: [UInt64][se-UInt64]
 - ultimate :: [Bool64][se-Bool64]
-- signatures :: [List][se-List] of [String][se-String]
+- signatures :: [Set][se-Set] of [Signature][se-Signature]
 - ca :: [OptContentAddress][se-OptContentAddress]
 - repair :: [Bool64][se-Bool64]
 - dontCheckSigs :: [Bool64][se-Bool64]
@@ -731,7 +737,7 @@ Dumps a path as a NAR
 [Framed][se-Framed] NAR dump
 
 #### If protocol version is between 1.21 and 1.23
-NAR dump sent using `STDERR_READ`
+NAR dump sent using [`STDERR_READ`](./logging.md#stderr_read)
 
 #### If protocol version is older than 1.21
 NAR dump sent raw on stream
@@ -746,9 +752,9 @@ NAR dump sent raw on stream
 targets :: [List][se-List] of [DerivedPath][se-DerivedPath]
 
 ### Outputs
-- willBuild :: [List][se-List] of [StorePath][se-StorePath]
-- willSubstitute :: [List][se-List] of [StorePath][se-StorePath]
-- unknown :: [List][se-List] of [StorePath][se-StorePath]
+- willBuild :: [Set][se-Set] of [StorePath][se-StorePath]
+- willSubstitute :: [Set][se-Set] of [StorePath][se-StorePath]
+- unknown :: [Set][se-Set] of [StorePath][se-StorePath]
 - downloadSize :: [UInt64][se-UInt64]
 - narSize :: [UInt64][se-UInt64]
 
@@ -764,7 +770,7 @@ Retrieves an associative map outputName -> storePath for a given derivation.
 path :: [StorePath][se-StorePath]  (must be a derivation path)
 
 ### Outputs
-outputs :: [Map][se-Map] of [String][se-String] to [OptStorePath][se-OptStorePath]
+outputs :: [Map][se-Map] of [OutputName][se-OutputName] to [OptStorePath][se-OptStorePath]
 
 
 ## RegisterDrvOutput
@@ -795,10 +801,10 @@ outputId :: [DrvOutput][se-DrvOutput]
 
 ### Outputs
 #### If protocol is 1.31 or newer
-realisations :: [List][se-List] of [Realisation][se-Realisation]
+realisations :: [Set][se-Set] of [Realisation][se-Realisation]
 
 #### If protocol is older than 1.31
-outPaths :: [List][se-List] of [BaseStorePath][se-BaseStorePath]
+outPaths :: [Set][se-Set] of [StorePath][se-StorePath]
 
 
 ## AddMultipleToStore
@@ -816,7 +822,7 @@ for each small NAR was costly.
 ### Inputs
 - repair :: [Bool64][se-Bool64]
 - dontCheckSigs :: [Bool64][se-Bool64]
-- [Framed][se-Framed] stream of add multiple NAR dump
+- [Framed][se-Framed] stream of [add multiple NAR dump][se-AddMultipleToStore]
 
 
 ## AddBuildLog
@@ -827,11 +833,11 @@ for each small NAR was costly.
 Attach some build logs to a given build.
 
 ### Inputs
-- path :: [String][se-String] (might be [BaseStorePath][se-BaseStorePath])
+- path :: [BaseStorePath][se-BaseStorePath]
 - [Framed][se-Framed] stream of log lines
 
 ### Outputs
-1 :: [Int][se-Int] (hardcoded)
+1 :: [Int][se-Int] (hardcoded and ignored by client)
 
 
 ## BuildPathsWithResults
@@ -856,10 +862,10 @@ results :: [List][se-List] of [KeyedBuildResult][se-KeyedBuildResult]
 
 ### Inputs
 - storePath :: [StorePath][se-StorePath]
-- gcRoot :: [String][se-String]
+- gcRoot :: [Path][se-Path]
 
 ### Outputs
-gcRoot :: [String][se-String]
+gcRoot :: [Path][se-Path]
 
 
 
@@ -884,6 +890,7 @@ gcRoot :: [String][se-String]
 [se-DrvOutput]: ./serialization.md#drvoutput
 [se-Realisation]: ./serialization.md#realisation
 [se-List]: ./serialization.md#list-of-x
+[se-Set]: ./serialization.md#set-of-x
 [se-Map]: ./serialization.md#map-of-x-to-y
 [se-SubstitutablePathInfo]: ./serialization.md#substitutablepathinfo
 [se-ValidPathInfo]: ./serialization.md#validpathinfo
@@ -891,4 +898,7 @@ gcRoot :: [String][se-String]
 [se-BuildResult]: ./serialization.md#buildmode
 [se-KeyedBuildResult]: ./serialization.md#keyedbuildresult
 [se-BasicDerivation]: ./serialization.md#basicderivation
-[se-Framed]: ./serialization.md#framed
\ No newline at end of file
+[se-Framed]: ./serialization.md#framed
+[se-AddMultipleToStore]: ./serialization.md#addmultipletostore-format
+[se-ExportFormat]: ./serialization.md#export-path-format
+[se-ImportPaths]: ./serialization.md#import-paths-format
\ No newline at end of file
diff --git a/tvix/docs/src/nix-daemon/serialization.md b/tvix/docs/src/nix-daemon/serialization.md
index a2694a4dea30..1042c956ba71 100644
--- a/tvix/docs/src/nix-daemon/serialization.md
+++ b/tvix/docs/src/nix-daemon/serialization.md
@@ -6,7 +6,7 @@ Little endian byte order
 
 - len :: [UInt64](#uint64)
 - len bytes of content
-- padding with zeros to ensure 64 bit alignment of content with padding
+- padding with zeros to ensure 64 bit alignment of content + padding
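+
+A minimal writer sketch (illustrative, not the actual nix-compat API):
+
+```rust
+use std::io::{self, Write};
+
+// Write a Bytes field: u64 length, the content, then zero padding so
+// that content + padding is a multiple of 8 bytes.
+fn write_bytes(w: &mut impl Write, data: &[u8]) -> io::Result<()> {
+    w.write_all(&(data.len() as u64).to_le_bytes())?;
+    w.write_all(data)?;
+    let pad = (8 - data.len() % 8) % 8;
+    w.write_all(&[0u8; 7][..pad])
+}
+```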
 
 
 ## Int serializers
@@ -16,6 +16,9 @@ Little endian byte order
 
 ### Int64
 [UInt64](#uint64) cast to C `int64_t` with upper bounds checking.
+This means that negative numbers can be written but not read.
+Since this is only used for cpuSystem and cpuUser, it is fine that
+negative numbers aren't supported.
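+
+As a sketch (helper name assumed), the read side looks like this:
+
+```rust
+use std::io::{self, Read};
+
+// An Int64 is a UInt64 on the wire, rejected when it exceeds i64::MAX --
+// hence "can be written but not read" for negative values.
+fn read_int64(r: &mut impl Read) -> io::Result<i64> {
+    let mut buf = [0u8; 8];
+    r.read_exact(&mut buf)?;
+    i64::try_from(u64::from_le_bytes(buf))
+        .map_err(|_| io::Error::new(io::ErrorKind::InvalidData, "Int64 out of range"))
+}
+```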
 
 ### UInt8
 [UInt64](#uint64) cast to C `uint8_t` with upper bounds checking.
@@ -25,7 +28,8 @@ Little endian byte order
 
 ### Time
 [UInt64](#uint64) cast to C `time_t` with upper bounds checking.
-s
+This means that negative numbers can be written but not read.
+
 ### Bool
 Sent as an [Int](#int) where 0 is false and everything else is true.
 
@@ -35,10 +39,10 @@ Sent as an [UInt64](#uint64) where 0 is false and everything else is true.
 ### FileIngestionMethod
 An [UInt8](#uint8) enum with the following possible values:
 
-| Name      | Int |
-| --------- | --- |
-| Flat      |  0  |
-| Recursive |  1  |
+| Name       | Int |
+| ---------- | --- |
+| Flat       |  0  |
+| NixArchive |  1  |
 
 ### BuildMode
 An [Int](#int) enum with the following possible values:
@@ -137,6 +141,15 @@ An [Int](#int) enum with the following possible values:
 | Int    |  0  |
 | String |  1  |
 
+### OptTrusted
+An [UInt8](#uint8) optional enum with the following possible values:
+
+| Name             | Int |
+| ---------------- | --- |
+| None             |  0  |
+| Some(Trusted)    |  1  |
+| Some(NotTrusted) |  2  |
+
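+Decoding is a straightforward match on the wire value; a sketch (type
+names assumed):
+
+```rust
+#[derive(Debug, Clone, Copy)]
+enum Trust {
+    Trusted,
+    NotTrusted,
+}
+
+// Decode OptTrusted from its UInt8 wire value per the table above.
+fn decode_opt_trusted(v: u8) -> Result<Option<Trust>, String> {
+    match v {
+        0 => Ok(None),
+        1 => Ok(Some(Trust::Trusted)),
+        2 => Ok(Some(Trust::NotTrusted)),
+        _ => Err(format!("invalid OptTrusted value: {v}")),
+    }
+}
+```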
 
 ## Bytes serializers
 
@@ -144,51 +157,107 @@ An [Int](#int) enum with the following possible values:
 Simply a [Bytes](#bytes) that has some UTF-8 string like semantics sometimes.
 
 ### StorePath
-String representation of a full store path.
+[String](#string) representation of a full store path including the store directory.
 
 ### BaseStorePath
-String representation of the basename of a store path. That is the store path
+[String](#string) representation of the basename of a store path. That is the store path
 without the /nix/store prefix.
 
+### StorePathName
+[String](#string) representation of the name part of a base store path. This
+is the part of the store path after the nixbase32 hash and the '-'.
+
+It must conform to the following rules:
+- Deny ".", "..", or those strings followed by '-'
+- Otherwise check that each character is 0-9, a-z, A-Z or one of +-._?=
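+
+As a sketch, these checks look like the following (helper name assumed,
+not the actual nix-compat API):
+
+```rust
+// Validate a store path name per the rules above.
+// (Empty names are also assumed to be invalid here.)
+fn is_valid_store_path_name(name: &str) -> bool {
+    let denied = name.is_empty()
+        || name == "."
+        || name == ".."
+        || name.starts_with(".-")
+        || name.starts_with("..-");
+    !denied
+        && name
+            .bytes()
+            .all(|b| b.is_ascii_alphanumeric() || b"+-._?=".contains(&b))
+}
+```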
+
+### StorePathHash
+[String](#string) representation of the hash part of a base store path. This
+is the part at the beginning of the store path, before the '-', and is in
+nixbase32 format.
+
+
+### OutputName
+[String](#string) representation of the name of a derivation output.
+This is usually combined with the name of the derivation to form the store
+path name of the store path for this output.
+
+Since an output name is usually combined to form a store path name, its
+format must follow the same rules as [StorePathName](#storepathname):
+- Deny ".", "..", or those strings followed by '-'
+- Otherwise check that each character is 0-9, a-z, A-Z or one of +-._?=
+
+
 ### OptStorePath
 Optional store path.
 
 If no store path this is serialized as the empty string otherwise it is the same as
 [StorePath](#storepath).
 
+### Path
+[String](#string) representation of an absolute path.
+
+### NARHash
+[String](#string) base16-encoded NAR SHA256 hash without algorithm prefix.
+
+### Signature
+[String](#string) with a signature for the given store path or realisation.
+This should be in the format `name`:`base64-encoded signature`, but this is
+not enforced by the protocol.
+
+### HashAlgorithm
+[String](#string) with one of the following values:
+- md5
+- sha1
+- sha256
+- sha512
+
+### HashDigest
+[String](#string) with a hash digest in any encoding.
+
+### OptHashDigest
+Optional version of [HashDigest](#hashdigest) where empty string means
+no value.
+
+
 ### ContentAddressMethodWithAlgo
-One of the following strings:
-- text:`hash algorithm`
-- fixed:r:`hash algorithm`
-- fixed:`hash algorithm`
+[String](#string) with one of the following formats:
+- text:[HashAlgorithm](#hashalgorithm)
+- fixed:r:[HashAlgorithm](#hashalgorithm)
+- fixed:[HashAlgorithm](#hashalgorithm)
 
-### DerivedPath
-#### If protocol is 1.30 or newer
-        return DerivedPath::parseLegacy(store, s);
-#### If protocol is older than 1.30
-        return parsePathWithOutputs(store, s).toDerivedPath();
+### OptContentAddressMethodWithAlgo
+Optional version of [ContentAddressMethodWithAlgo](#contentaddressmethodwithalgo)
+where empty string means no value.
 
 ### ContentAddress
-String with the format:
-- [ContentAddressMethodWithAlgo](#contentaddressmethodwithalgo):`hash`
+[String](#string) with the format:
+- [ContentAddressMethodWithAlgo](#contentaddressmethodwithalgo):[HashDigest](#hashdigest)
 
 ### OptContentAddress
 Optional version of [ContentAddress](#contentaddress) where empty string means
 no content address.
 
+### DerivedPath
+#### If protocol is 1.30 or newer
+output-names = [OutputName](#outputname), { "," [OutputName](#outputname) }<br>
+output-spec = "*" | output-names<br>
+derived-path = [StorePath](#storepath), [ "!", output-spec ]<br>
+
+#### If protocol is older than 1.30
+[StorePath](#storepath), [ "!", [OutputName](#outputname), { "," [OutputName](#outputname) } ]
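+
+A parsing sketch for the 1.30+ form (type names assumed):
+
+```rust
+enum OutputSpec<'a> {
+    All,                 // "*"
+    Named(Vec<&'a str>), // comma-separated output names
+}
+
+// Split a derived-path into its store path and optional output spec.
+fn parse_derived_path(s: &str) -> (&str, Option<OutputSpec<'_>>) {
+    match s.split_once('!') {
+        None => (s, None),
+        Some((path, "*")) => (path, Some(OutputSpec::All)),
+        Some((path, outs)) => (path, Some(OutputSpec::Named(outs.split(',').collect()))),
+    }
+}
+```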
+
 ### DrvOutput
-String with format:
-- `hash with any prefix`!`output name`
+[String](#string) with format:
+- `hash with any prefix` "!" [OutputName](#outputname)
 
 ### Realisation
-A JSON object sent as a string.
+A JSON object sent as a [String](#string).
 
 The JSON object has the following keys:
-| Key                   | Value                   |
-| --------------------- | ----------------------- |
-| id                    | [DrvOutput](#drvoutput) |
-| outPath               | [StorePath](#storepath) |
-| signatures            | Array of String         |
+| Key                   | Value                            |
+| --------------------- | -------------------------------- |
+| id                    | [DrvOutput](#drvoutput)          |
+| outPath               | [StorePath](#storepath)          |
+| signatures            | Array of [Signature](#signature) |
 | dependentRealisations | Object where key is [DrvOutput](#drvoutput) and value is [StorePath](#storepath) |
 
 
@@ -200,20 +269,22 @@ A list is encoded as a [Size](#size) length n followed by n encodings of x
 ### Map of x to y
 A map is encoded as a [Size](#size) length n followed by n encodings of pairs of x and y
 
+### Set of x
+A set is encoded as a [Size](#size) length n followed by n encodings of x
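+
+All three collection shapes share this count-prefixed layout; a writer
+sketch (illustrative, not the actual nix-compat API):
+
+```rust
+use std::io::{self, Write};
+
+// Count-prefixed encoding shared by List, Set and (with pairs) Map.
+// A set is just a list whose elements happen to be unique.
+fn write_list<W: Write, T>(
+    w: &mut W,
+    items: &[T],
+    mut write_item: impl FnMut(&mut W, &T) -> io::Result<()>,
+) -> io::Result<()> {
+    w.write_all(&(items.len() as u64).to_le_bytes())?;
+    for item in items {
+        write_item(&mut *w, item)?;
+    }
+    Ok(())
+}
+```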
 
 ### BuildResult
 - status :: [BuildStatus](#buildstatus)
 - errorMsg :: [String](#string)
 
 #### Protocol 1.29 or newer
-- timesBuilt :: [Int](#int)
-- isNonDeterministic :: [Bool64](#bool64)
-- startTime :: [Time](#time)
-- stopTime :: [Time](#time)
+- timesBuilt :: [Int](#int) (defaults to 0)
+- isNonDeterministic :: [Bool64](#bool64) (defaults to false)
+- startTime :: [Time](#time) (defaults to 0)
+- stopTime :: [Time](#time) (defaults to 0)
 
 #### Protocol 1.37 or newer
-- cpuUser :: [OptMicroseconds](#optmicroseconds)
-- cpuSystem :: [OptMicroseconds](#optmicroseconds)
+- cpuUser :: [OptMicroseconds](#optmicroseconds) (defaults to none)
+- cpuSystem :: [OptMicroseconds](#optmicroseconds) (defaults to none)
 
 #### Protocol 1.28 or newer
 builtOutputs ::  [Map](#map-of-x-to-y) of [DrvOutput](#drvoutput) to [Realisation](#realisations)
@@ -232,23 +303,22 @@ Optional microseconds.
 
 
 ### SubstitutablePathInfo
-- storePath :: [StorePath](#storepath)
 - deriver :: [OptStorePath](#optstorepath)
-- references :: [List](#list-of-x) of [StorePath](#storepath)
+- references :: [Set][#set-of-x] of [StorePath](#storepath)
 - downloadSize :: [UInt64](#uint64)
 - narSize :: [UInt64](#uint64)
 
 
 ### UnkeyedValidPathInfo
 - deriver :: [OptStorePath](#optstorepath)
-- narHash :: [String](#string) SHA256 NAR hash base16 encoded
-- references :: [List](#list-of-x) of [StorePath](#storepath)
+- narHash :: [NARHash](#narhash)
+- references :: [Set](#set-of-x) of [StorePath](#storepath)
 - registrationTime :: [Time](#time)
 - narSize :: [UInt64](#uint64)
 
 #### If protocol version is 1.16 or above
-- ultimate :: [Bool64](#bool64)
-- signatures :: [List](#list-of-x) of [String](#string)
+- ultimate :: [Bool64](#bool64) (defaults to false)
+- signatures :: [Set](#set-of-x) of [Signature](#signature)
 - ca :: [OptContentAddress](#optcontentaddress)
 
 
@@ -257,13 +327,13 @@ Optional microseconds.
 - info :: [UnkeyedValidPathInfo](#unkeyedvalidpathinfo)
 
 ### DerivationOutput
-- path :: [String](#string)
-- hashAlgo :: [String](#string)
-- hash :: [String](#string)
+- path :: [OptStorePath](#optstorepath)
+- hashAlgo :: [OptContentAddressMethodWithAlgo](#optcontentaddressmethodwithalgo)
+- hash :: [OptHashDigest](#opthashdigest)
 
 ### BasicDerivation
-- outputs :: [Map](#map-of-x-to-y) of [String](#string) to [DerivationOutput](#derivationoutput)
-- inputSrcs :: [List](#list-of-x) of [StorePath](#storepath)
+- outputs :: [Map](#map-of-x-to-y) of [OutputName](#outputname) to [DerivationOutput](#derivationoutput)
+- inputSrcs :: [Set](#set-of-x) of [StorePath](#storepath)
 - platform :: [String](#string)
 - builder :: [String](#string)
 - args :: [List](#list-of-x) of [String](#string)
@@ -271,13 +341,13 @@ Optional microseconds.
 
 ### TraceLine
 - havePos :: [Size](#size) (hardcoded to 0)
-- hint :: [String](#string)
+- hint :: [String](#string) (If logger is JSON, invalid UTF-8 is replaced with U+FFFD)
 
 ### Error
 - type :: [String](#string) (hardcoded to `Error`)
 - level :: [Verbosity](#verbosity)
 - name :: [String](#string) (removed and hardcoded to `Error`)
-- msg :: [String](#string)
+- msg :: [String](#string) (If logger is JSON, invalid UTF-8 is replaced with U+FFFD)
 - havePos :: [Size](#size) (hardcoded to 0)
 - traces :: [List](#list-of-x) of [TraceLine](#traceline)
 
@@ -297,9 +367,43 @@ At protocol 1.23 [AddToStoreNar](./operations.md#addtostorenar) introduced a
 framed streaming for sending the NAR dump and later versions of the protocol
 also used this framing for other operations.
 
-At its core the framed streaming is just a series of [Bytes](#bytes) of
-varying length and terminated by an empty [Bytes](#bytes).
+At its core the framed streaming is just a series of [UInt64](#uint64) `size`
+followed by `size` bytes. The stream is terminated when `size` is zero.
+
+Unlike [Bytes](#bytes), frames are *NOT* padded.
 
 This method of sending data has the advantage of not having to parse the data
 to find where it ends. Older versions of the protocol would potentially parse
-the NAR twice.
\ No newline at end of file
+the NAR twice.
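+
+A reader sketch (illustrative, not the actual nix-compat API):
+
+```rust
+use std::io::{self, Read, Write};
+
+// Copy a framed stream into `out` until the zero-length terminator.
+// Frames are a u64 size followed by exactly `size` unpadded bytes.
+fn copy_framed<R: Read, W: Write>(mut r: R, out: &mut W) -> io::Result<()> {
+    loop {
+        let mut len_buf = [0u8; 8];
+        r.read_exact(&mut len_buf)?;
+        let len = u64::from_le_bytes(len_buf);
+        if len == 0 {
+            return Ok(());
+        }
+        if io::copy(&mut (&mut r).take(len), out)? != len {
+            return Err(io::ErrorKind::UnexpectedEof.into());
+        }
+    }
+}
+```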
+
+
+## AddMultipleToStore format
+
+Paths must be topologically sorted.
+
+- expected :: [UInt64](#uint64)
+
+### Repeated expected times
+- info :: [ValidPathInfo](#validpathinfo)
+- NAR dump
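+
+A writer sketch (the `Wire` type and helpers are assumed):
+
+```rust
+// Write the AddMultipleToStore payload; `paths` must already be in
+// topological order (dependencies before dependents).
+fn write_add_multiple(
+    conn: &mut Wire,
+    paths: &[(ValidPathInfo, NarDump)],
+) -> Result<(), DaemonError> {
+    conn.write_u64(paths.len() as u64)?; // expected
+    for (info, nar) in paths {
+        write_valid_path_info(conn, info)?; // ValidPathInfo, see above
+        conn.write_nar(nar)?;
+    }
+    Ok(())
+}
+```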
+
+
+## Export path format
+- NAR dump
+- 0x4558494e :: [Int](#int) (hardcoded, 'EXIN' in ASCII)
+- path :: [StorePath](#storepath)
+- references :: [Set](#set-of-x) of [StorePath](#storepath)
+- deriver :: [OptStorePath](#optstorepath)
+- hasSignature :: [Int](#int) (hardcoded to 0 in newer versions)
+
+#### If hasSignature is 1
+- signature :: [String](#string) (ignored)
+
+
+## Import paths format
+
+- hasNext :: [UInt64](#uint64)
+
+### While hasNext is 1
+- [Export path format](#export-path-format)
+- hasNext :: [UInt64](#uint64)
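+
+A reader sketch (the `Wire` type and helpers are assumed):
+
+```rust
+// Read the import-paths framing: an export-format entry is read for as
+// long as hasNext is 1.
+fn read_import_paths(conn: &mut Wire) -> Result<Vec<ExportedPath>, DaemonError> {
+    let mut paths = Vec::new();
+    while conn.read_u64()? == 1 {
+        paths.push(read_export_path(conn)?); // "Export path format" above
+    }
+    Ok(paths)
+}
+```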
diff --git a/tvix/store/docs/api.md b/tvix/docs/src/store/api.md
index 01e72671a743..89495a0d1c32 100644
--- a/tvix/store/docs/api.md
+++ b/tvix/docs/src/store/api.md
@@ -1,11 +1,10 @@
-tvix-[ca]store API
-==============
+# tvix-[ca]store API
 
 This document outlines the design of the API exposed by tvix-castore and tvix-
 store, as well as other implementations of this store protocol.
 
 This document is meant to be read side-by-side with
-[castore.md](../../tvix-castore/docs/castore.md) which describes the data model
+[Data Model](../castore/data-model.md) which describes the data model
 in more detail.
 
 The store API has four main consumers:
@@ -205,7 +204,7 @@ and potentially a chain of `Directory` objects requested from
 When the desired file is reached, the *BlobService* can be used to read the
 contents of this file, and return it back to the evaluator.
 
-FUTUREWORK: define how importing from symlinks should/does work.
+FUTUREWORK: Define how importing from symlinks should/does work.
 
 Contrary to Nix, this has the advantage of not having to copy all of the
 contents of a store path to the evaluating machine, but really only fetching
@@ -218,7 +217,7 @@ This is useful for people running a Tvix-only system, or running builds on a
 In a system with Nix installed, we can't simply manually "extract" things to
 `/nix/store`, as Nix assumes to own all writes to this location.
 In these use cases, we're probably better off exposing a tvix-store as a local
-binary cache (that's what `//tvix/nar-bridge-go` does).
+binary cache (that's what `//tvix/nar-bridge` does).
 
 Assuming we are in an environment where we control `/nix/store` exclusively, a
 "realize to disk" would either "extract" things from the `tvix-store` to a
diff --git a/tvix/docs/src/value-pointer-equality.md b/tvix/docs/src/value-pointer-equality.md
index d84efcb50ca9..a4539513ef73 100644
--- a/tvix/docs/src/value-pointer-equality.md
+++ b/tvix/docs/src/value-pointer-equality.md
@@ -47,8 +47,10 @@ works in C++ Nix, the only production ready Nix implementation currently availab
 
 ## Nix (Pointer) Equality in C++ Nix
 
-TIP: The summary presented here is up-to-date as of 2023-06-27 and was tested
-with Nix 2.3, 2.11 and 2.15.
+```admonish info
+The summary presented here is up-to-date as of 2023-06-27 and was tested with
+Nix 2.3, 2.11 and 2.15.
+```
 
 ### `EvalState::eqValues` and `ExprOpEq::eval`
 
diff --git a/tvix/eval/Cargo.toml b/tvix/eval/Cargo.toml
index 677ce6ab85be..c99bea4f7125 100644
--- a/tvix/eval/Cargo.toml
+++ b/tvix/eval/Cargo.toml
@@ -8,40 +8,42 @@ name = "tvix_eval"
 
 [dependencies]
 builtin-macros = { path = "./builtin-macros", package = "tvix-eval-builtin-macros" }
-bytes = "1.4.0"
-bstr = { version = "1.8.0", features = ["serde"] }
-codemap = "0.1.3"
-codemap-diagnostic = "0.1.1"
-dirs = "4.0.0"
-genawaiter = { version = "0.99.1", default_features = false }
-imbl = { version = "2.0", features = [ "serde" ] }
-itertools = "0.12.0"
-lazy_static = "1.4.0"
-lexical-core = { version = "0.8.5", features = ["format", "parse-floats"] }
-os_str_bytes = { version = "6.3", features = ["conversions"] }
-path-clean = "0.1"
-proptest = { version = "1.3.0", default_features = false, features = ["std", "alloc", "tempfile"], optional = true }
-regex = "1.6.0"
-rnix = "0.11.0"
-rowan = "*" # pinned by rnix
-serde = { version = "1.0", features = [ "rc", "derive" ] }
-serde_json = "1.0"
-smol_str = "0.2.0"
-tabwriter = "1.2"
-test-strategy = { version = "0.2.1", optional = true }
+bytes = { workspace = true }
+bstr = { workspace = true, features = ["serde"] }
+codemap = { workspace = true }
+codemap-diagnostic = { workspace = true }
+dirs = { workspace = true }
+genawaiter = { workspace = true }
+itertools = { workspace = true }
+lazy_static = { workspace = true }
+lexical-core = { workspace = true, features = ["format", "parse-floats"] }
+os_str_bytes = { workspace = true, features = ["conversions"] }
+path-clean = { workspace = true }
+proptest = { workspace = true, features = ["std", "alloc", "tempfile"], optional = true }
+regex = { workspace = true }
+rnix = { workspace = true }
+rowan = { workspace = true } # pinned by rnix
+serde = { workspace = true, features = ["rc", "derive"] }
+serde_json = { workspace = true }
+smol_str = { workspace = true }
+tabwriter = { workspace = true }
+test-strategy = { workspace = true, optional = true }
 toml = "0.6.0"
-xml-rs = "0.8.4"
-sha2 = "0.10.8"
-sha1 = "0.10.6"
-md-5 = "0.10.6"
-data-encoding = "2.5.0"
+sha2 = { workspace = true }
+sha1 = { workspace = true }
+md-5 = { workspace = true }
+data-encoding = { workspace = true }
+rustc-hash = { workspace = true }
+nohash-hasher = { workspace = true }
+vu128 = { workspace = true }
 
 [dev-dependencies]
-criterion = "0.5"
-itertools = "0.12.0"
-pretty_assertions = "1.2.1"
-rstest = "0.19.0"
-tempfile = "3.3.0"
+criterion = { workspace = true }
+itertools = { workspace = true }
+mimalloc = { workspace = true }
+pretty_assertions = { workspace = true }
+rstest = { workspace = true }
+tempfile = { workspace = true }
 
 [features]
 default = ["impure", "arbitrary", "nix_tests"]
@@ -54,7 +56,12 @@ nix_tests = []
 impure = []
 
 # Enables Arbitrary impls for internal types (required to run tests)
-arbitrary = ["proptest", "test-strategy", "imbl/proptest"]
+arbitrary = ["proptest", "test-strategy"]
+
+# Don't leak strings (enable this if you care about peak memory usage of eval)
+#
+# This is intended as a stop-gap until we have a garbage collector
+no_leak = []
 
 [[bench]]
 name = "eval"
diff --git a/tvix/eval/benches/eval.rs b/tvix/eval/benches/eval.rs
index 57d4eb71b59f..f4d6489f1e5c 100644
--- a/tvix/eval/benches/eval.rs
+++ b/tvix/eval/benches/eval.rs
@@ -1,8 +1,14 @@
 use criterion::{black_box, criterion_group, criterion_main, Criterion};
 use itertools::Itertools;
+use mimalloc::MiMalloc;
+
+#[global_allocator]
+static GLOBAL: MiMalloc = MiMalloc;
 
 fn interpret(code: &str) {
-    tvix_eval::Evaluation::new_pure().evaluate(code, None);
+    tvix_eval::Evaluation::builder_pure()
+        .build()
+        .evaluate(code, None);
 }
 
 fn eval_literals(c: &mut Criterion) {
diff --git a/tvix/eval/build.rs b/tvix/eval/build.rs
index a9c9a78b060d..b37a6e8a0c8d 100644
--- a/tvix/eval/build.rs
+++ b/tvix/eval/build.rs
@@ -5,5 +5,10 @@ fn main() {
         "cargo:rustc-env=TVIX_CURRENT_SYSTEM={}",
         &env::var("TARGET").unwrap()
     );
-    println!("cargo:rerun-if-changed-env=TARGET")
+    println!("cargo:rerun-if-changed-env=TARGET");
+
+    // Pick up new test case files
+    // https://github.com/la10736/rstest/issues/256
+    println!("cargo:rerun-if-changed=src/tests/nix_tests");
+    println!("cargo:rerun-if-changed=src/tests/tvix_tests")
 }
diff --git a/tvix/eval/builtin-macros/Cargo.toml b/tvix/eval/builtin-macros/Cargo.toml
index 3a35ea12a0c0..0696d742b342 100644
--- a/tvix/eval/builtin-macros/Cargo.toml
+++ b/tvix/eval/builtin-macros/Cargo.toml
@@ -5,9 +5,9 @@ authors = [ "Griffin Smith <root@gws.fyi>" ]
 edition = "2021"
 
 [dependencies]
-syn = { version = "1.0.57", features = ["full", "parsing", "printing", "visit", "visit-mut", "extra-traits"] }
-quote = "1.0.8"
-proc-macro2 = "1"
+syn = { version = "1.0.109", features = ["full", "parsing", "printing", "visit", "visit-mut", "extra-traits"] }
+quote = { workspace = true }
+proc-macro2 = { workspace = true }
 
 [lib]
 proc-macro = true
diff --git a/tvix/eval/clippy.toml b/tvix/eval/clippy.toml
new file mode 100644
index 000000000000..c5302112b3ae
--- /dev/null
+++ b/tvix/eval/clippy.toml
@@ -0,0 +1,3 @@
+# See https://nnethercote.github.io/perf-book/hashing.html. Use FxHashMap and
+# FxHashSet, not HashMap and HashSet
+disallowed-types = ["std::collections::HashMap", "std::collections::HashSet"]
diff --git a/tvix/eval/default.nix b/tvix/eval/default.nix
index 91661291f7b6..9370c81ced1c 100644
--- a/tvix/eval/default.nix
+++ b/tvix/eval/default.nix
@@ -1,9 +1,16 @@
 # TODO: find a way to build the benchmarks via crate2nix
-{ depot, pkgs, ... }:
+{ depot, pkgs, lib, ... }:
 
-depot.tvix.crates.workspaceMembers.tvix-eval.build.override {
+(depot.tvix.crates.workspaceMembers.tvix-eval.build.override {
   runTests = true;
 
   # Make C++ Nix available, to compare eval results against.
   testInputs = [ pkgs.nix ];
-}
+}).overrideAttrs (old: rec {
+  meta.ci.targets = lib.filter (x: lib.hasPrefix "with-features" x || x == "no-features") (lib.attrNames passthru);
+  passthru = old.passthru // (depot.tvix.utils.mkFeaturePowerset {
+    inherit (old) crateName;
+    features = [ "nix_tests" ];
+    override.testInputs = [ pkgs.nix ];
+  });
+})
diff --git a/tvix/eval/src/builtins/impure.rs b/tvix/eval/src/builtins/impure.rs
index c82b910f5ffa..dccb7fbb7d8a 100644
--- a/tvix/eval/src/builtins/impure.rs
+++ b/tvix/eval/src/builtins/impure.rs
@@ -9,7 +9,6 @@ use std::{
 use crate::{
     self as tvix_eval,
     errors::ErrorKind,
-    io::FileType,
     value::NixAttrs,
     vm::generators::{self, GenCo},
     NixString, Value,
@@ -60,12 +59,7 @@ mod impure_builtins {
                         NixString::from(
                             String::from_utf8(name.to_vec()).expect("parsing file name as string"),
                         ),
-                        Value::from(match ftype {
-                            FileType::Directory => "directory",
-                            FileType::Regular => "regular",
-                            FileType::Symlink => "symlink",
-                            FileType::Unknown => "unknown",
-                        }),
+                        Value::from(ftype.to_string()),
                     )
                 });
 
@@ -87,6 +81,18 @@ mod impure_builtins {
             }
         }
     }
+
+    #[builtin("readFileType")]
+    async fn builtin_read_file_type(co: GenCo, path: Value) -> Result<Value, ErrorKind> {
+        match coerce_value_to_path(&co, path).await? {
+            Err(cek) => Ok(Value::from(cek)),
+            Ok(path) => Ok(Value::from(
+                generators::request_read_file_type(&co, path)
+                    .await
+                    .to_string(),
+            )),
+        }
+    }
 }
 
 /// Return all impure builtins, that is all builtins which may perform I/O
diff --git a/tvix/eval/src/builtins/mod.rs b/tvix/eval/src/builtins/mod.rs
index 949fe844a20e..f130bbc5b15f 100644
--- a/tvix/eval/src/builtins/mod.rs
+++ b/tvix/eval/src/builtins/mod.rs
@@ -6,11 +6,10 @@
 use bstr::{ByteSlice, ByteVec};
 use builtin_macros::builtins;
 use genawaiter::rc::Gen;
-use imbl::OrdMap;
 use regex::Regex;
 use std::cmp::{self, Ordering};
+use std::collections::BTreeMap;
 use std::collections::VecDeque;
-use std::collections::{BTreeMap, HashSet};
 use std::path::PathBuf;
 
 use crate::arithmetic_op;
@@ -85,9 +84,9 @@ mod pure_builtins {
     use std::ffi::OsString;
 
     use bstr::{BString, ByteSlice, B};
-    use imbl::Vector;
     use itertools::Itertools;
     use os_str_bytes::OsStringBytes;
+    use rustc_hash::FxHashSet;
 
     use crate::{value::PointerEquality, AddContext, NixContext, NixContextElement};
 
@@ -245,7 +244,7 @@ mod pure_builtins {
 
     #[builtin("concatLists")]
     async fn builtin_concat_lists(co: GenCo, lists: Value) -> Result<Value, ErrorKind> {
-        let mut out = imbl::Vector::new();
+        let mut out = Vec::new();
 
         for value in lists.to_list()? {
             let list = try_value!(generators::request_force(&co, value).await).to_list()?;
@@ -258,7 +257,7 @@ mod pure_builtins {
     #[builtin("concatMap")]
     async fn builtin_concat_map(co: GenCo, f: Value, list: Value) -> Result<Value, ErrorKind> {
         let list = list.to_list()?;
-        let mut res = imbl::Vector::new();
+        let mut res = Vec::new();
         for val in list {
             let out = generators::request_call_with(&co, f.clone(), [val]).await;
             let out = try_value!(generators::request_force(&co, out).await);
@@ -274,9 +273,10 @@ mod pure_builtins {
         list: Value,
     ) -> Result<Value, ErrorKind> {
         let mut separator = separator.to_contextful_str()?;
+
         let mut context = NixContext::new();
-        if let Some(sep_context) = separator.context_mut() {
-            context = context.join(sep_context);
+        if let Some(sep_context) = separator.take_context() {
+            context.extend(sep_context.into_iter())
         }
         let list = list.to_list()?;
         let mut res = BString::default();
@@ -296,13 +296,8 @@ mod pure_builtins {
             {
                 Ok(mut s) => {
                     res.push_str(&s);
-                    if let Some(ref mut other_context) = s.context_mut() {
-                        // It is safe to consume the other context here
-                        // because the `list` and `separator` are originally
-                        // moved, here.
-                        // We are not going to use them again
-                        // because the result here is a string.
-                        context = context.join(other_context);
+                    if let Some(other_context) = s.take_context() {
+                        context.extend(other_context.into_iter());
                     }
                 }
                 Err(c) => return Ok(Value::Catchable(Box::new(c))),
@@ -390,13 +385,13 @@ mod pure_builtins {
     #[builtin("filter")]
     async fn builtin_filter(co: GenCo, pred: Value, list: Value) -> Result<Value, ErrorKind> {
         let list: NixList = list.to_list()?;
-        let mut out = imbl::Vector::new();
+        let mut out = Vec::new();
 
         for value in list {
             let result = generators::request_call_with(&co, pred.clone(), [value.clone()]).await;
             let verdict = try_value!(generators::request_force(&co, result).await);
             if verdict.as_bool()? {
-                out.push_back(value);
+                out.push(value);
             }
         }
 
@@ -469,15 +464,6 @@ mod pure_builtins {
         toml::from_str(toml_str.to_str()?).map_err(|err| err.into())
     }
 
-    #[builtin("filterSource")]
-    #[allow(non_snake_case)]
-    async fn builtin_filterSource(_co: GenCo, #[lazy] _e: Value) -> Result<Value, ErrorKind> {
-        // TODO: implement for nixpkgs compatibility
-        Ok(Value::from(CatchableErrorKind::UnimplementedFeature(
-            "filterSource".into(),
-        )))
-    }
-
     #[builtin("genericClosure")]
     async fn builtin_generic_closure(co: GenCo, input: Value) -> Result<Value, ErrorKind> {
         let attrs = input.to_attrs()?;
@@ -493,7 +479,7 @@ mod pure_builtins {
 
         let operator = attrs.select_required("operator")?;
 
-        let mut res = imbl::Vector::new();
+        let mut res = Vec::new();
         let mut done_keys: Vec<Value> = vec![];
 
         while let Some(val) = work_set.pop_front() {
@@ -511,7 +497,7 @@ mod pure_builtins {
                 continue;
             }
 
-            res.push_back(val.clone());
+            res.push(val.clone());
 
             let op_result = generators::request_force(
                 &co,
@@ -532,18 +518,18 @@ mod pure_builtins {
         #[lazy] generator: Value,
         length: Value,
     ) -> Result<Value, ErrorKind> {
-        let mut out = imbl::Vector::<Value>::new();
         let len = length.as_int()?;
+        let mut out = Vec::with_capacity(
+            len.try_into()
+                .map_err(|_| ErrorKind::Abort(format!("can not create list of size {}", len)))?,
+        );
+
         // the best span we can get…
         let span = generators::request_span(&co).await;
 
         for i in 0..len {
-            let val = Value::Thunk(Thunk::new_suspended_call(
-                generator.clone(),
-                i.into(),
-                span.clone(),
-            ));
-            out.push_back(val);
+            let val = Value::Thunk(Thunk::new_suspended_call(generator.clone(), i.into(), span));
+            out.push(val);
         }
 
         Ok(Value::List(out.into()))
@@ -564,7 +550,7 @@ mod pure_builtins {
 
     #[builtin("groupBy")]
     async fn builtin_group_by(co: GenCo, f: Value, list: Value) -> Result<Value, ErrorKind> {
-        let mut res: BTreeMap<NixString, imbl::Vector<Value>> = BTreeMap::new();
+        let mut res: BTreeMap<NixString, Vec<Value>> = BTreeMap::new();
         for val in list.to_list()? {
             let key = try_value!(
                 generators::request_force(
@@ -575,7 +561,7 @@ mod pure_builtins {
             )
             .to_str()?;
 
-            res.entry(key).or_default().push_back(val);
+            res.entry(key).or_default().push(val);
         }
         Ok(Value::attrs(NixAttrs::from_iter(
             res.into_iter()
@@ -641,7 +627,7 @@ mod pure_builtins {
         let elements = groups
             .into_iter()
             .map(|(key, group)| {
-                let mut outputs: Vector<NixString> = Vector::new();
+                let mut outputs: Vec<NixString> = Vec::new();
                 let mut is_path = false;
                 let mut all_outputs = false;
 
@@ -654,7 +640,7 @@ mod pure_builtins {
 
                         NixContextElement::Single { name, derivation } => {
                             debug_assert!(derivation == key, "Unexpected group containing mixed keys, expected: {:?}, encountered {:?}", key, derivation);
-                            outputs.push_back(name.clone().into());
+                            outputs.push(name.clone().into());
                         }
 
                         NixContextElement::Derivation(drv_path) => {
@@ -681,7 +667,7 @@ mod pure_builtins {
                     vec_attrs.push(("outputs", Value::List(outputs
                                 .into_iter()
                                 .map(|s| s.into())
-                                .collect::<Vector<Value>>()
+                                .collect::<Vec<Value>>()
                                 .into()
                     )));
                 }
@@ -717,7 +703,7 @@ mod pure_builtins {
         //
         // In this implementation, we do none of that, no syntax checks, no realization.
         // The next `TODO` are the checks that Nix implements.
-        let mut ctx_elements: HashSet<NixContextElement> = HashSet::new();
+        let mut ctx_elements: FxHashSet<NixContextElement> = FxHashSet::default();
         let span = generators::request_span(&co).await;
         let origin = origin
             .coerce_to_string(
@@ -764,9 +750,8 @@ mod pure_builtins {
         }
 
         if let Some(origin_ctx) = origin.context_mut() {
-            // FUTUREWORK(performance): avoid this clone
-            // and extend in-place.
-            *origin_ctx = origin_ctx.clone().join(&mut ctx_elements.into());
+            origin_ctx.extend(ctx_elements)
+            // TODO: if origin has no context at all, ctx_elements is silently dropped here; is that intended?
         }
 
         Ok(origin.into())
@@ -809,7 +794,7 @@ mod pure_builtins {
         }
         let mut right_keys = right_set.keys();
 
-        let mut out: OrdMap<NixString, Value> = OrdMap::new();
+        let mut out: BTreeMap<NixString, Value> = BTreeMap::new();
 
         // Both iterators have at least one entry
         let mut left = left_keys.next().unwrap();
@@ -990,15 +975,16 @@ mod pure_builtins {
     }
 
     #[builtin("map")]
-    async fn builtin_map(co: GenCo, #[lazy] f: Value, list: Value) -> Result<Value, ErrorKind> {
-        let mut out = imbl::Vector::<Value>::new();
+    async fn builtin_map(co: GenCo, #[lazy] f: Value, list_val: Value) -> Result<Value, ErrorKind> {
+        let list = list_val.to_list()?;
+        let mut out = Vec::with_capacity(list.len());
 
         // the best span we can get…
         let span = generators::request_span(&co).await;
 
-        for val in list.to_list()? {
-            let result = Value::Thunk(Thunk::new_suspended_call(f.clone(), val, span.clone()));
-            out.push_back(result)
+        for val in list {
+            let result = Value::Thunk(Thunk::new_suspended_call(f.clone(), val, span));
+            out.push(result)
         }
 
         Ok(Value::List(out.into()))
@@ -1011,7 +997,7 @@ mod pure_builtins {
         attrs: Value,
     ) -> Result<Value, ErrorKind> {
         let attrs = attrs.to_attrs()?;
-        let mut out = imbl::OrdMap::new();
+        let mut out: BTreeMap<NixString, Value> = BTreeMap::new();
 
         // the best span we can get…
         let span = generators::request_span(&co).await;
@@ -1020,9 +1006,9 @@ mod pure_builtins {
             let result = Value::Thunk(Thunk::new_suspended_call(
                 f.clone(),
                 key.clone().into(),
-                span.clone(),
+                span,
             ));
-            let result = Value::Thunk(Thunk::new_suspended_call(result, value, span.clone()));
+            let result = Value::Thunk(Thunk::new_suspended_call(result, value, span));
 
             out.insert(key, result);
         }
@@ -1057,7 +1043,7 @@ mod pure_builtins {
                         // and then a compressor name will be extracted from that.
                         grp.map(|g| Value::from(g.as_str())).unwrap_or(Value::Null)
                     })
-                    .collect::<imbl::Vector<Value>>()
+                    .collect::<Vec<Value>>()
                     .into(),
             )),
             None => Ok(Value::Null),
@@ -1100,17 +1086,17 @@ mod pure_builtins {
 
     #[builtin("partition")]
     async fn builtin_partition(co: GenCo, pred: Value, list: Value) -> Result<Value, ErrorKind> {
-        let mut right: imbl::Vector<Value> = Default::default();
-        let mut wrong: imbl::Vector<Value> = Default::default();
+        let mut right: Vec<Value> = Default::default();
+        let mut wrong: Vec<Value> = Default::default();
 
         let list: NixList = list.to_list()?;
         for elem in list {
             let result = generators::request_call_with(&co, pred.clone(), [elem.clone()]).await;
 
             if try_value!(generators::request_force(&co, result).await).as_bool()? {
-                right.push_back(elem);
+                right.push(elem);
             } else {
-                wrong.push_back(elem);
+                wrong.push(elem);
             };
         }
 
@@ -1133,7 +1119,7 @@ mod pure_builtins {
             .to_list()?
             .into_iter()
             .map(|v| v.to_str())
-            .collect::<Result<HashSet<_>, _>>()?;
+            .collect::<Result<FxHashSet<_>, _>>()?;
         let res = attrs.iter().filter_map(|(k, v)| {
             if !keys.contains(k) {
                 Some((k.clone(), v.clone()))
@@ -1169,8 +1155,8 @@ mod pure_builtins {
         let mut empty_string_replace = false;
         let mut context = NixContext::new();
 
-        if let Some(string_context) = string.context_mut() {
-            context = context.join(string_context);
+        if let Some(string_context) = string.take_context() {
+            context.extend(string_context.into_iter());
         }
 
         // This can't be implemented using Rust's string.replace() as
@@ -1200,8 +1186,8 @@ mod pure_builtins {
                 if string[i..i + from.len()] == *from {
                     res.push_str(&to);
                     i += from.len();
-                    if let Some(to_ctx) = to.context_mut() {
-                        context = context.join(to_ctx);
+                    if let Some(to_ctx) = to.take_context() {
+                        context.extend(to_ctx.into_iter());
                     }
 
                     // remember if we applied the empty from->to
@@ -1232,8 +1218,8 @@ mod pure_builtins {
 
             if from.is_empty() {
                 res.push_str(&to);
-                if let Some(to_ctx) = to.context_mut() {
-                    context = context.join(to_ctx);
+                if let Some(to_ctx) = to.take_context() {
+                    context.extend(to_ctx.into_iter());
                 }
                 break;
             }
@@ -1265,12 +1251,12 @@ mod pure_builtins {
         let re = Regex::new(re.to_str()?).unwrap();
         let mut capture_locations = re.capture_locations();
         let num_captures = capture_locations.len();
-        let mut ret = imbl::Vector::new();
+        let mut ret = Vec::new();
         let mut pos = 0;
 
         while let Some(thematch) = re.captures_read_at(&mut capture_locations, text, pos) {
             // push the unmatched characters preceding the match
-            ret.push_back(Value::from(NixString::new_inherit_context_from(
+            ret.push(Value::from(NixString::new_inherit_context_from(
                 &s,
                 &text[pos..thematch.start()],
             )));
@@ -1279,7 +1265,7 @@ mod pure_builtins {
             // group in the regex, containing the characters
             // matched by that capture group, or null if no match.
             // We skip capture 0; it represents the whole match.
-            let v: imbl::Vector<Value> = (1..num_captures)
+            let v: Vec<Value> = (1..num_captures)
                 .map(|i| capture_locations.get(i))
                 .map(|o| {
                     o.map(|(start, end)| {
@@ -1290,14 +1276,17 @@ mod pure_builtins {
                     .unwrap_or(Value::Null)
                 })
                 .collect();
-            ret.push_back(Value::List(NixList::from(v)));
+            ret.push(Value::List(NixList::from(v)));
+            if pos == text.len() {
+                break;
+            }
             pos = thematch.end();
         }
 
         // push the unmatched characters following the last match
         // Here, a surprising thing happens: we silently discard the original
         // context. This is as intended; Nix does the same.
-        ret.push_back(Value::from(&text[pos..]));
+        ret.push(Value::from(&text[pos..]));
 
         Ok(Value::List(NixList::from(ret)))
     }
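
The `pos == text.len()` break added above guards against regexes that can match the empty string: at the end of the input, `thematch.end()` equals `pos`, so the loop would never advance. A standalone toy loop showing the failure mode, using the `regex` crate as the builtin does:

    use regex::Regex;

    fn main() {
        let re = Regex::new("a*").unwrap(); // can match the empty string
        let text = "aa";
        let (mut pos, mut pieces) = (0, 0);
        while let Some(m) = re.find_at(text, pos) {
            pieces += 1;
            if pos == text.len() {
                break; // the final empty match would otherwise repeat forever
            }
            pos = m.end();
        }
        assert_eq!(pieces, 2); // "aa" itself, then the empty match at the end
    }
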
@@ -1504,15 +1493,19 @@ mod pure_builtins {
         }
 
         let mut buf: Vec<u8> = vec![];
-        to_xml::value_to_xml(&mut buf, &value)?;
-        Ok(String::from_utf8(buf)?.into())
-    }
-
-    #[builtin("placeholder")]
-    async fn builtin_placeholder(co: GenCo, #[lazy] _x: Value) -> Result<Value, ErrorKind> {
-        generators::emit_warning_kind(&co, WarningKind::NotImplemented("builtins.placeholder"))
-            .await;
-        Ok("<builtins.placeholder-is-not-implemented-in-tvix-yet>".into())
+        let context = to_xml::value_to_xml(&mut buf, &value)?;
+
+        Ok((
+            buf,
+            // FUTUREWORK: We have a distinction between an empty context and
+            // no context at all. Fix this.
+            if !context.is_empty() {
+                Some(Box::new(context))
+            } else {
+                None
+            },
+        )
+            .into())
     }
 
     #[builtin("trace")]
@@ -1629,6 +1622,8 @@ pub fn pure_builtins() -> Vec<(&'static str, Value)> {
 
 #[builtins]
 mod placeholder_builtins {
+    use crate::NixContext;
+
     use super::*;
 
     #[builtin("unsafeDiscardStringContext")]
@@ -1677,24 +1672,17 @@ mod placeholder_builtins {
             .to_contextful_str()?;
 
         // If there's any context, we will swap any ... by a path one.
-        if let Some(ctx) = v.context_mut() {
-            let new_context: tvix_eval::NixContext = ctx
-                .iter()
-                .map(|elem| match elem {
-                    // FUTUREWORK(performance): ideally, we should either:
-                    // (a) do interior mutation of the existing context.
-                    // (b) let the structural sharing make those clones cheap.
-                    crate::NixContextElement::Derivation(drv_path) => {
-                        crate::NixContextElement::Plain(drv_path.to_string())
-                    }
-                    elem => elem.clone(),
-                })
-                .collect::<HashSet<_>>()
-                .into();
+        if let Some(c) = v.take_context() {
+            let mut context = NixContext::new();
+            context.extend(c.into_iter().map(|elem| match elem {
+                crate::NixContextElement::Derivation(drv_path) => {
+                    crate::NixContextElement::Plain(drv_path.to_string())
+                }
+                elem => elem.clone(),
+            }));
 
-            *ctx = new_context;
+            return Ok(Value::String(NixString::new_context_from(context, v)));
         }
-
         Ok(Value::from(v))
     }
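
Across these hunks the change follows a single pattern: string contexts are now moved out with `take_context()` and merged in place via `extend()`, replacing the old clone-and-`join`. A minimal sketch of that pattern with a toy context type (the real `NixContext` is assumed to wrap a set of elements and implement `Extend`):

    use std::collections::BTreeSet;

    #[derive(Default)]
    struct ToyContext(BTreeSet<String>);

    impl ToyContext {
        // Merge another context in place; no intermediate clone, unlike
        // the previous clone-then-join approach.
        fn extend<I: IntoIterator<Item = String>>(&mut self, iter: I) {
            self.0.extend(iter);
        }
    }

    fn main() {
        let mut context = ToyContext::default();
        // take_context() hands over an owned context; we fake one here.
        let taken = Some(ToyContext(BTreeSet::from(["out".to_string()])));
        if let Some(other) = taken {
            context.extend(other.0);
        }
        assert!(context.0.contains("out"));
    }
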
 
diff --git a/tvix/eval/src/builtins/to_xml.rs b/tvix/eval/src/builtins/to_xml.rs
index bb12cebfc9d0..093e127fe25e 100644
--- a/tvix/eval/src/builtins/to_xml.rs
+++ b/tvix/eval/src/builtins/to_xml.rs
@@ -3,112 +3,98 @@
 //! things in nixpkgs rely on.
 
 use bstr::ByteSlice;
+use std::borrow::Cow;
 use std::{io::Write, rc::Rc};
-use xml::writer::events::XmlEvent;
-use xml::writer::EmitterConfig;
-use xml::writer::EventWriter;
 
-use crate::{ErrorKind, Value};
+use crate::{ErrorKind, NixContext, NixContextElement, Value};
 
 /// Recursively serialise a value to XML. The value *must* have been
 /// deep-forced before being passed to this function.
-pub fn value_to_xml<W: Write>(mut writer: W, value: &Value) -> Result<(), ErrorKind> {
-    let config = EmitterConfig {
-        perform_indent: true,
-        pad_self_closing: true,
-
-        // Nix uses single-quotes *only* in the document declaration,
-        // so we need to write it manually.
-        write_document_declaration: false,
-        ..Default::default()
-    };
-
+/// On success, returns the NixContext.
+pub fn value_to_xml<W: Write>(mut writer: W, value: &Value) -> Result<NixContext, ErrorKind> {
     // Write a literal document declaration, using C++-Nix-style
     // single quotes.
     writeln!(writer, "<?xml version='1.0' encoding='utf-8'?>")?;
 
-    let mut writer = EventWriter::new_with_config(writer, config);
-
-    writer.write(XmlEvent::start_element("expr"))?;
-    value_variant_to_xml(&mut writer, value)?;
-    writer.write(XmlEvent::end_element())?;
+    let mut emitter = XmlEmitter::new(writer);
 
-    // Unwrap the writer to add the final newline that C++ Nix adds.
-    writeln!(writer.into_inner())?;
+    emitter.write_open_tag("expr", &[])?;
+    value_variant_to_xml(&mut emitter, value)?;
+    emitter.write_closing_tag("expr")?;
 
-    Ok(())
+    Ok(emitter.into_context())
 }
 
 fn write_typed_value<W: Write, V: ToString>(
-    w: &mut EventWriter<W>,
-    name: &str,
+    w: &mut XmlEmitter<W>,
+    name_unescaped: &str,
     value: V,
 ) -> Result<(), ErrorKind> {
-    w.write(XmlEvent::start_element(name).attr("value", &value.to_string()))?;
-    w.write(XmlEvent::end_element())?;
+    w.write_self_closing_tag(name_unescaped, &[("value", &value.to_string())])?;
     Ok(())
 }
 
-fn value_variant_to_xml<W: Write>(w: &mut EventWriter<W>, value: &Value) -> Result<(), ErrorKind> {
+fn value_variant_to_xml<W: Write>(w: &mut XmlEmitter<W>, value: &Value) -> Result<(), ErrorKind> {
     match value {
         Value::Thunk(t) => return value_variant_to_xml(w, &t.value()),
 
         Value::Null => {
-            w.write(XmlEvent::start_element("null"))?;
-            w.write(XmlEvent::end_element())
+            w.write_open_tag("null", &[])?;
+            w.write_closing_tag("null")?;
         }
 
         Value::Bool(b) => return write_typed_value(w, "bool", b),
         Value::Integer(i) => return write_typed_value(w, "int", i),
         Value::Float(f) => return write_typed_value(w, "float", f),
-        Value::String(s) => return write_typed_value(w, "string", s.to_str()?),
+        Value::String(s) => {
+            if let Some(context) = s.context() {
+                w.extend_context(context.iter().cloned());
+            }
+            return write_typed_value(w, "string", s.to_str()?);
+        }
         Value::Path(p) => return write_typed_value(w, "path", p.to_string_lossy()),
 
         Value::List(list) => {
-            w.write(XmlEvent::start_element("list"))?;
+            w.write_open_tag("list", &[])?;
 
             for elem in list.into_iter() {
                 value_variant_to_xml(w, elem)?;
             }
 
-            w.write(XmlEvent::end_element())
+            w.write_closing_tag("list")?;
         }
 
         Value::Attrs(attrs) => {
-            w.write(XmlEvent::start_element("attrs"))?;
+            w.write_open_tag("attrs", &[])?;
 
             for elem in attrs.iter() {
-                w.write(XmlEvent::start_element("attr").attr("name", &elem.0.to_str_lossy()))?;
+                w.write_open_tag("attr", &[("name", &elem.0.to_str_lossy())])?;
                 value_variant_to_xml(w, elem.1)?;
-                w.write(XmlEvent::end_element())?;
+                w.write_closing_tag("attr")?;
             }
 
-            w.write(XmlEvent::end_element())
+            w.write_closing_tag("attrs")?;
         }
 
         Value::Closure(c) => {
-            w.write(XmlEvent::start_element("function"))?;
+            w.write_open_tag("function", &[])?;
 
             match &c.lambda.formals {
                 Some(formals) => {
-                    let mut attrspat = XmlEvent::start_element("attrspat");
+                    let mut attrs: Vec<(&str, &str)> = Vec::with_capacity(2);
                     if formals.ellipsis {
-                        attrspat = attrspat.attr("ellipsis", "1");
+                        attrs.push(("ellipsis", "1"));
                     }
                     if let Some(ref name) = &formals.name {
-                        attrspat = attrspat.attr("name", name.as_str());
+                        attrs.push(("name", name.as_str()));
                     }
 
-                    w.write(attrspat)?;
-
+                    w.write_open_tag("attrspat", &attrs)?;
                     for arg in formals.arguments.iter() {
-                        w.write(
-                            XmlEvent::start_element("attr").attr("name", &arg.0.to_str_lossy()),
-                        )?;
-                        w.write(XmlEvent::end_element())?;
+                        w.write_self_closing_tag("attr", &[("name", &arg.0.to_str_lossy())])?;
                     }
 
-                    w.write(XmlEvent::end_element())?;
+                    w.write_closing_tag("attrspat")?;
                 }
                 None => {
                     // TODO(tazjin): tvix does not currently persist function
@@ -120,17 +106,16 @@ fn value_variant_to_xml<W: Write>(w: &mut EventWriter<W>, value: &Value) -> Resu
                     // If we don't want to persist the data, we can re-parse the
                     // AST from the spans of the lambda's bytecode and figure it
                     // out that way, but it needs some investigating.
-                    w.write(XmlEvent::start_element("varpat").attr("name", /* fake: */ "x"))?;
-                    w.write(XmlEvent::end_element())?;
+                    w.write_self_closing_tag("varpat", &[("name", /* fake: */ "x")])?;
                 }
             }
 
-            w.write(XmlEvent::end_element())
+            w.write_closing_tag("function")?;
         }
 
         Value::Builtin(_) => {
-            w.write(XmlEvent::start_element("unevaluated"))?;
-            w.write(XmlEvent::end_element())
+            w.write_open_tag("unevaluated", &[])?;
+            w.write_closing_tag("unevaluated")?;
         }
 
         Value::AttrNotFound
@@ -148,7 +133,189 @@ fn value_variant_to_xml<W: Write>(w: &mut EventWriter<W>, value: &Value) -> Resu
         Value::Catchable(_) => {
             panic!("tvix bug: value_to_xml() called on a value which had not been deep-forced")
         }
-    }?;
+    };
 
     Ok(())
 }
+
+/// A simple-stupid XML emitter, which implements only the subset needed for byte-by-byte compat with C++ nix’ `builtins.toXML`.
+struct XmlEmitter<W> {
+    /// The current indentation
+    cur_indent: usize,
+    writer: W,
+    context: NixContext,
+}
+
+impl<W: Write> XmlEmitter<W> {
+    pub fn new(writer: W) -> Self {
+        XmlEmitter {
+            cur_indent: 0,
+            writer,
+            context: Default::default(),
+        }
+    }
+
+    /// Write an open tag with the given name (which is not escaped!)
+    /// and attributes (Keys are not escaped! Only attribute values are.)
+    pub fn write_open_tag(
+        &mut self,
+        name_unescaped: &str,
+        attrs: &[(&str, &str)],
+    ) -> std::io::Result<()> {
+        self.add_indent()?;
+        self.writer.write_all(b"<")?;
+        self.writer.write_all(name_unescaped.as_bytes())?;
+        self.write_attrs_escape_vals(attrs)?;
+        self.writer.write_all(b">\n")?;
+        self.cur_indent += 2;
+        Ok(())
+    }
+
+    /// Write a self-closing open tag with the given name (which is not escaped!)
+    /// and attributes (Keys are not escaped! Only attribute values are.)
+    pub fn write_self_closing_tag(
+        &mut self,
+        name_unescaped: &str,
+        attrs: &[(&str, &str)],
+    ) -> std::io::Result<()> {
+        self.add_indent()?;
+        self.writer.write_all(b"<")?;
+        self.writer.write_all(name_unescaped.as_bytes())?;
+        self.write_attrs_escape_vals(attrs)?;
+        self.writer.write_all(b" />\n")?;
+        Ok(())
+    }
+
+    /// Write a closing tag with the given name (which is not escaped!)
+    pub fn write_closing_tag(&mut self, name_unescaped: &str) -> std::io::Result<()> {
+        self.cur_indent -= 2;
+        self.add_indent()?;
+        self.writer.write_all(b"</")?;
+        self.writer.write_all(name_unescaped.as_bytes())?;
+        self.writer.write_all(b">\n")?;
+        Ok(())
+    }
+
+    #[inline]
+    fn add_indent(&mut self) -> std::io::Result<()> {
+        self.writer.write_all(&b" ".repeat(self.cur_indent))
+    }
+
+    /// Write an attribute list
+    fn write_attrs_escape_vals(&mut self, attrs: &[(&str, &str)]) -> std::io::Result<()> {
+        for (name, val) in attrs {
+            self.writer.write_all(b" ")?;
+            self.writer.write_all(name.as_bytes())?;
+            self.writer.write_all(br#"=""#)?;
+            self.writer
+                .write_all(Self::escape_attr_value(val).as_bytes())?;
+            self.writer.write_all(b"\"")?;
+        }
+        Ok(())
+    }
+
+    /// Escape the given attribute value, making sure we only actually clone the string if we needed to replace something.
+    fn escape_attr_value(s: &str) -> Cow<str> {
+        let mut last_escape: usize = 0;
+        let mut res: Cow<str> = Cow::Borrowed("");
+        // iterating via char_indices gives us the ability to index the original string slice at character boundaries
+        for (idx, c) in s.char_indices() {
+            match Self::should_escape_char(c) {
+                None => {}
+                Some(new) => {
+                    // add characters since the last escape we did
+                    res += &s[last_escape..idx];
+                    // add the escaped value
+                    res += new;
+                    last_escape = idx + 1;
+                }
+            }
+        }
+        // we did not need to escape anything, so borrow original string
+        if last_escape == 0 {
+            Cow::Borrowed(s)
+        } else {
+            // add the remaining characters
+            res += &s[last_escape..];
+            res
+        }
+    }
+
+    fn should_escape_char(c: char) -> Option<&'static str> {
+        match c {
+            '<' => Some("&lt;"),
+            '>' => Some("&gt;"),
+            '"' => Some("&quot;"),
+            '\'' => Some("&apos;"),
+            '&' => Some("&amp;"),
+            '\n' => Some("&#xA;"),
+            '\r' => Some("&#xD;"),
+            _ => None,
+        }
+    }
+
+    /// Extends the existing context with more context elements.
+    fn extend_context<T>(&mut self, iter: T)
+    where
+        T: IntoIterator<Item = NixContextElement>,
+    {
+        self.context.extend(iter)
+    }
+
+    /// Consumes [Self] and returns the [NixContext] collected.
+    fn into_context(self) -> NixContext {
+        self.context
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use bytes::buf::Writer;
+    use pretty_assertions::assert_eq;
+
+    use crate::builtins::to_xml::XmlEmitter;
+    use std::borrow::Cow;
+
+    #[test]
+    fn xml_gen() {
+        let mut buf = Vec::new();
+        let mut x = XmlEmitter::new(&mut buf);
+        x.write_open_tag("hello", &[("hi", "it’s me"), ("no", "<escape>")])
+            .unwrap();
+        x.write_self_closing_tag("self-closing", &[("tag", "yay")])
+            .unwrap();
+        x.write_closing_tag("hello").unwrap();
+
+        assert_eq!(
+            std::str::from_utf8(&buf).unwrap(),
+            r##"<hello hi="itโ€™s me" no="&lt;escape&gt;">
+  <self-closing tag="yay" />
+</hello>
+"##
+        );
+    }
+
+    #[test]
+    fn xml_escape() {
+        match XmlEmitter::<Writer<Vec<u8>>>::escape_attr_value("ab<>c&de") {
+            Cow::Owned(s) => assert_eq!(s, "ab&lt;&gt;c&amp;de".to_string(), "escape stuff"),
+            Cow::Borrowed(s) => panic!("s should be owned {}", s),
+        }
+        match XmlEmitter::<Writer<Vec<u8>>>::escape_attr_value("") {
+            Cow::Borrowed(s) => assert_eq!(s, "", "empty escape is borrowed"),
+            Cow::Owned(s) => panic!("s should be borrowed {}", s),
+        }
+        match XmlEmitter::<Writer<Vec<u8>>>::escape_attr_value("hi!ŷbla") {
+            Cow::Borrowed(s) => assert_eq!(s, "hi!ŷbla", "no escape is borrowed"),
+            Cow::Owned(s) => panic!("s should be borrowed {}", s),
+        }
+        match XmlEmitter::<Writer<Vec<u8>>>::escape_attr_value("hi!<ŷ>bla") {
+            Cow::Owned(s) => assert_eq!(
+                s,
+                "hi!&lt;ŷ&gt;bla".to_string(),
+                "multi-byte chars are correctly handled"
+            ),
+            Cow::Borrowed(s) => panic!("s should be owned {}", s),
+        }
+    }
+}
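
The Cow-based escaping in `escape_attr_value` is worth seeing in isolation: borrow the input untouched when nothing needs escaping, and allocate only once the first replacement happens. A self-contained sketch with a reduced escape table (not the emitter's full set):

    use std::borrow::Cow;

    fn escape(s: &str) -> Cow<str> {
        let mut res: Cow<str> = Cow::Borrowed("");
        let mut last = 0;
        for (idx, c) in s.char_indices() {
            let repl = match c {
                '<' => "&lt;",
                '>' => "&gt;",
                '&' => "&amp;",
                _ => continue,
            };
            res += &s[last..idx]; // unescaped run since the previous escape
            res += repl;
            last = idx + c.len_utf8();
        }
        if last == 0 {
            Cow::Borrowed(s) // nothing was escaped: no allocation at all
        } else {
            res += &s[last..];
            res
        }
    }

    fn main() {
        assert!(matches!(escape("plain"), Cow::Borrowed(_)));
        assert_eq!(escape("a<b"), "a&lt;b");
    }
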
diff --git a/tvix/eval/src/chunk.rs b/tvix/eval/src/chunk.rs
index f1a35a6ce162..2a5446a782ed 100644
--- a/tvix/eval/src/chunk.rs
+++ b/tvix/eval/src/chunk.rs
@@ -1,9 +1,10 @@
+use crate::opcode::{CodeIdx, ConstantIdx, Op, OpArg};
+use crate::value::Value;
+use crate::{CoercionKind, SourceCode};
 use std::io::Write;
-use std::ops::{Index, IndexMut};
 
-use crate::opcode::{CodeIdx, ConstantIdx, OpCode};
-use crate::value::Value;
-use crate::SourceCode;
+/// Maximum size of a u64 encoded in the vu128 varint encoding.
+const U64_VARINT_SIZE: usize = 9;
 
 /// Represents a source location from which one or more operations
 /// were compiled.
@@ -30,39 +31,69 @@ struct SourceSpan {
 /// emitted by the compiler.
 #[derive(Debug, Default)]
 pub struct Chunk {
-    pub code: Vec<OpCode>,
+    pub code: Vec<u8>,
     pub constants: Vec<Value>,
     spans: Vec<SourceSpan>,
+
+    /// Index of the last operation (i.e. not data) written to the code vector.
+    /// Some operations (e.g. jump patching) need to know this.
+    last_op: usize,
 }
 
-impl Index<ConstantIdx> for Chunk {
-    type Output = Value;
+impl Chunk {
+    pub fn push_op(&mut self, op: Op, span: codemap::Span) -> usize {
+        self.last_op = self.code.len();
+        self.code.push(op as u8);
+        self.push_span(span, self.last_op);
+        self.last_op
+    }
 
-    fn index(&self, index: ConstantIdx) -> &Self::Output {
-        &self.constants[index.0]
+    pub fn push_uvarint(&mut self, data: u64) {
+        let mut encoded = [0u8; U64_VARINT_SIZE];
+        let bytes_written = vu128::encode_u64(&mut encoded, data);
+        self.code.extend_from_slice(&encoded[..bytes_written]);
     }
-}
 
-impl Index<CodeIdx> for Chunk {
-    type Output = OpCode;
+    pub fn read_uvarint(&self, idx: usize) -> (u64, usize) {
+        debug_assert!(
+            idx < self.code.len(),
+            "invalid bytecode (missing varint operand)",
+        );
 
-    fn index(&self, index: CodeIdx) -> &Self::Output {
-        &self.code[index.0]
+        if self.code.len() - idx >= U64_VARINT_SIZE {
+            vu128::decode_u64(
+                &self.code[idx..idx + U64_VARINT_SIZE]
+                    .try_into()
+                    .expect("size statically checked"),
+            )
+        } else {
+            let mut tmp = [0u8; U64_VARINT_SIZE];
+            tmp[..self.code.len() - idx].copy_from_slice(&self.code[idx..]);
+            vu128::decode_u64(&tmp)
+        }
     }
-}
 
-impl IndexMut<CodeIdx> for Chunk {
-    fn index_mut(&mut self, index: CodeIdx) -> &mut Self::Output {
-        &mut self.code[index.0]
+    pub fn push_u16(&mut self, data: u16) {
+        self.code.extend_from_slice(&data.to_le_bytes())
     }
-}
 
-impl Chunk {
-    pub fn push_op(&mut self, data: OpCode, span: codemap::Span) -> CodeIdx {
-        let idx = self.code.len();
-        self.code.push(data);
-        self.push_span(span, idx);
-        CodeIdx(idx)
+    /// Patches the operand of the jump instruction at the given index to
+    /// point to the *next* instruction that will be emitted.
+    pub fn patch_jump(&mut self, idx: usize) {
+        let offset = (self.code.len() - idx - /* arg idx = */ 1 - /* jump arg size = */ 2) as u16;
+        self.code[idx + 1..idx + 3].copy_from_slice(&offset.to_le_bytes())
+    }
+
+    pub fn read_u16(&self, idx: usize) -> u16 {
+        if idx + 2 > self.code.len() {
+            panic!("Tvix bug: invalid bytecode (expected u16 operand not found)")
+        }
+
+        let byte_array: &[u8; 2] = &self.code[idx..idx + 2]
+            .try_into()
+            .expect("fixed-size slice can not fail to convert to array");
+
+        u16::from_le_bytes(*byte_array)
     }
 
     /// Get the first span of a chunk, no questions asked.
@@ -70,23 +101,13 @@ impl Chunk {
         self.spans[0].span
     }
 
-    /// Return a reference to the last op in the chunk, if any
-    pub fn last_op(&self) -> Option<&OpCode> {
-        self.code.last()
-    }
-
-    /// Pop the last operation from the chunk and clean up its tracked
-    /// span. Used when the compiler backtracks.
-    pub fn pop_op(&mut self) {
-        // Simply drop the last op.
-        self.code.pop();
-
-        if let Some(span) = self.spans.last() {
-            // If the last span started at this op, drop it.
-            if span.start == self.code.len() {
-                self.spans.pop();
-            }
+    /// Return the last op in the chunk together with its index, if any.
+    pub fn last_op(&self) -> Option<(Op, usize)> {
+        if self.code.is_empty() {
+            return None;
         }
+
+        Some((self.code[self.last_op].into(), self.last_op))
     }
 
     pub fn push_constant(&mut self, data: Value) -> ConstantIdx {
@@ -100,8 +121,6 @@ impl Chunk {
         self.constants.get(constant.0)
     }
 
-    // Span tracking implementation
-
     fn push_span(&mut self, span: codemap::Span, start: usize) {
         match self.spans.last_mut() {
             // We do not need to insert the same span again, as this
@@ -136,66 +155,88 @@ impl Chunk {
     }
 
     /// Write the disassembler representation of the operation at
-    /// `idx` to the specified writer.
+    /// `idx` to the specified writer, and return how many bytes in the code to
+    /// skip for the next op.
     pub fn disassemble_op<W: Write>(
         &self,
         writer: &mut W,
         source: &SourceCode,
         width: usize,
         idx: CodeIdx,
-    ) -> Result<(), std::io::Error> {
+    ) -> Result<usize, std::io::Error> {
         write!(writer, "{:#width$x}\t ", idx.0, width = width)?;
 
         // Print continuation character if the previous operation was at
         // the same line, otherwise print the line.
         let line = source.get_line(self.get_span(idx));
-        if idx.0 > 0 && source.get_line(self.get_span(CodeIdx(idx.0 - 1))) == line {
+        if idx.0 > 0 && source.get_line(self.get_span(idx - 1)) == line {
             write!(writer, "   |\t")?;
         } else {
             write!(writer, "{:4}\t", line)?;
         }
 
-        match self[idx] {
-            OpCode::OpConstant(idx) => {
-                let val_str = match &self[idx] {
-                    Value::Thunk(t) => t.debug_repr(),
-                    Value::Closure(c) => format!("closure({:p})", c.lambda),
-                    val => format!("{}", val),
-                };
+        let _fmt_constant = |idx: ConstantIdx| match &self.constants[idx.0] {
+            Value::Thunk(t) => t.debug_repr(),
+            Value::Closure(c) => format!("closure({:p})", c.lambda),
+            Value::Blueprint(b) => format!("blueprint({:p})", b),
+            val => format!("{}", val),
+        };
 
-                writeln!(writer, "OpConstant({}@{})", val_str, idx.0)
+        let op: Op = self.code[idx.0].into();
+
+        match op.arg_type() {
+            OpArg::None => {
+                writeln!(writer, "Op{:?}", op)?;
+                Ok(1)
             }
-            op => writeln!(writer, "{:?}", op),
-        }?;
 
-        Ok(())
-    }
+            OpArg::Fixed => {
+                let arg = self.read_u16(idx.0 + 1);
+                writeln!(writer, "Op{:?}({})", op, arg)?;
+                Ok(3)
+            }
 
-    /// Extend this chunk with the content of another, moving out of the other
-    /// in the process.
-    ///
-    /// This is used by the compiler when it detects that it unnecessarily
-    /// thunked a nested expression.
-    pub fn extend(&mut self, other: Self) {
-        // Some operations need to be modified in certain ways before being
-        // valid as part of the new chunk.
-        let const_count = self.constants.len();
-        for (idx, op) in other.code.iter().enumerate() {
-            let span = other.get_span(CodeIdx(idx));
-            match op {
-                // As the constants shift, the index needs to be moved relatively.
-                OpCode::OpConstant(ConstantIdx(idx)) => {
-                    self.push_op(OpCode::OpConstant(ConstantIdx(idx + const_count)), span)
+            OpArg::Uvarint => {
+                let (arg, size) = self.read_uvarint(idx.0 + 1);
+                writeln!(writer, "Op{:?}({})", op, arg)?;
+                Ok(1 + size)
+            }
+
+            _ => match op {
+                Op::CoerceToString => {
+                    let kind: CoercionKind = self.code[idx.0 + 1].into();
+                    writeln!(writer, "Op{:?}({:?})", op, kind)?;
+                    Ok(2)
                 }
 
-                // Other operations either operate on relative offsets, or no
-                // offsets, and are safe to keep as-is.
-                _ => self.push_op(*op, span),
-            };
-        }
+                Op::Closure | Op::ThunkClosure | Op::ThunkSuspended => {
+                    let mut cidx = idx.0 + 1;
 
-        self.constants.extend(other.constants);
-        self.spans.extend(other.spans);
+                    let (bp_idx, size) = self.read_uvarint(cidx);
+                    cidx += size;
+
+                    let (packed_count, size) = self.read_uvarint(cidx);
+                    cidx += size;
+
+                    let captures_with = packed_count & 0b1 == 1;
+                    let count = packed_count >> 1;
+
+                    write!(writer, "Op{:?}(BP @ {}, ", op, bp_idx)?;
+                    if captures_with {
+                        write!(writer, "captures with, ")?;
+                    }
+                    writeln!(writer, "{} upvalues)", count)?;
+
+                    for _ in 0..count {
+                        let (_, size) = self.read_uvarint(cidx);
+                        cidx += size;
+                    }
+
+                    Ok(cidx - idx.0)
+                }
+                _ => panic!("Tvix bug: don't know how to format argument for Op{:?}", op),
+            },
+        }
     }
 }
 
@@ -211,79 +252,49 @@ mod tests {
     #[test]
     fn push_op() {
         let mut chunk = Chunk::default();
-        chunk.push_op(OpCode::OpAdd, dummy_span());
-        assert_eq!(chunk.code.last().unwrap(), &OpCode::OpAdd);
+        let idx = chunk.push_op(Op::Add, dummy_span());
+        assert_eq!(*chunk.code.last().unwrap(), Op::Add as u8);
+        assert_eq!(chunk.code[idx], Op::Add as u8);
     }
 
     #[test]
-    fn extend_empty() {
+    fn push_op_with_arg() {
         let mut chunk = Chunk::default();
-        chunk.push_op(OpCode::OpAdd, dummy_span());
+        let mut idx = chunk.push_op(Op::Constant, dummy_span());
+        chunk.push_uvarint(42);
 
-        let other = Chunk::default();
-        chunk.extend(other);
+        assert_eq!(chunk.code[idx], Op::Constant as u8);
 
-        assert_eq!(
-            chunk.code,
-            vec![OpCode::OpAdd],
-            "code should not have changed"
-        );
+        idx += 1;
+        let (arg, size) = chunk.read_uvarint(idx);
+        assert_eq!(idx + size, chunk.code.len());
+        assert_eq!(arg, 42);
     }
 
     #[test]
-    fn extend_simple() {
-        let span = dummy_span();
+    fn push_jump() {
         let mut chunk = Chunk::default();
-        chunk.push_op(OpCode::OpAdd, span);
 
-        let mut other = Chunk::default();
-        other.push_op(OpCode::OpSub, span);
-        other.push_op(OpCode::OpMul, span);
+        chunk.push_op(Op::Constant, dummy_span());
+        chunk.push_uvarint(0);
 
-        let expected_code = vec![OpCode::OpAdd, OpCode::OpSub, OpCode::OpMul];
+        let idx = chunk.push_op(Op::Jump, dummy_span());
+        chunk.push_u16(0);
 
-        chunk.extend(other);
+        chunk.push_op(Op::Constant, dummy_span());
+        chunk.push_uvarint(1);
 
-        assert_eq!(chunk.code, expected_code, "code should have been extended");
-    }
+        chunk.patch_jump(idx);
+        chunk.push_op(Op::Return, dummy_span());
 
-    #[test]
-    fn extend_with_constant() {
-        let span = dummy_span();
-        let mut chunk = Chunk::default();
-        chunk.push_op(OpCode::OpAdd, span);
-        let cidx = chunk.push_constant(Value::Integer(0));
-        assert_eq!(
-            cidx.0, 0,
-            "first constant in main chunk should have index 0"
-        );
-        chunk.push_op(OpCode::OpConstant(cidx), span);
-
-        let mut other = Chunk::default();
-        other.push_op(OpCode::OpSub, span);
-        let other_cidx = other.push_constant(Value::Integer(1));
-        assert_eq!(
-            other_cidx.0, 0,
-            "first constant in other chunk should have index 0"
-        );
-        other.push_op(OpCode::OpConstant(other_cidx), span);
-
-        chunk.extend(other);
-
-        let expected_code = vec![
-            OpCode::OpAdd,
-            OpCode::OpConstant(ConstantIdx(0)),
-            OpCode::OpSub,
-            OpCode::OpConstant(ConstantIdx(1)), // <- note: this was rewritten
+        #[rustfmt::skip]
+        let expected: Vec<u8> = vec![
+            Op::Constant as u8, 0,
+            Op::Jump as u8, 2, 0,
+            Op::Constant as u8, 1,
+            Op::Return as u8,
         ];
 
-        assert_eq!(
-            chunk.code, expected_code,
-            "code should have been extended and rewritten"
-        );
-
-        assert_eq!(chunk.constants.len(), 2);
-        assert!(matches!(chunk.constants[0], Value::Integer(0)));
-        assert!(matches!(chunk.constants[1], Value::Integer(1)));
+        assert_eq!(chunk.code, expected);
     }
 }
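
The `patch_jump` arithmetic above (`code.len() - idx - 1 - 2`) is easy to get off by one: the u16 operand stores the distance from the end of the three-byte jump instruction to the next instruction to be emitted. A standalone sketch of the same computation over a raw byte vector, with made-up opcode bytes:

    fn main() {
        // One jump opcode byte plus a two-byte little-endian placeholder.
        let mut code: Vec<u8> = vec![0x10, 0x00, 0x00];
        let jump_idx = 0;

        // Two more instruction bytes are emitted before the jump target.
        code.extend_from_slice(&[0x20, 0x30]);

        // Distance from the end of the jump instruction (1 opcode byte +
        // 2 operand bytes) to the next instruction to be emitted.
        let offset = (code.len() - jump_idx - 1 - 2) as u16;
        code[jump_idx + 1..jump_idx + 3].copy_from_slice(&offset.to_le_bytes());

        assert_eq!(code, vec![0x10, 0x02, 0x00, 0x20, 0x30]);
    }
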
diff --git a/tvix/eval/src/compiler/bindings.rs b/tvix/eval/src/compiler/bindings.rs
index 634cc5402247..6a3ae485936c 100644
--- a/tvix/eval/src/compiler/bindings.rs
+++ b/tvix/eval/src/compiler/bindings.rs
@@ -9,6 +9,8 @@ use std::iter::Peekable;
 use rnix::ast::HasEntry;
 use rowan::ast::AstChildren;
 
+use crate::spans::{EntireFile, OrEntireFile};
+
 use super::*;
 
 type PeekableAttrs = Peekable<AstChildren<ast::Attr>>;
@@ -556,6 +558,15 @@ impl Compiler<'_, '_> {
         self.scope_mut().end_scope();
     }
 
+    /// Emit definitions for all variables in the top-level global env passed to the evaluation
+    /// (e.g. local variables in the REPL).
+    pub(super) fn compile_env(&mut self, env: &FxHashMap<SmolStr, Value>) {
+        for (name, value) in env {
+            self.scope_mut().declare_constant(name.to_string());
+            self.emit_constant(value.clone(), &EntireFile);
+        }
+    }
+
     /// Actually binds all tracked bindings by emitting the bytecode that places
     /// them in their stack slots.
     fn bind_values(&mut self, bindings: TrackedBindings) {
@@ -569,7 +580,7 @@ impl Compiler<'_, '_> {
 
                 KeySlot::Static { slot, name } => {
                     let span = self.scope()[slot].span;
-                    self.emit_constant(name.as_str().into(), &span);
+                    self.emit_constant(name.as_str().into(), &OrEntireFile(span));
                     self.scope_mut().mark_initialised(slot);
                 }
 
@@ -594,7 +605,7 @@ impl Compiler<'_, '_> {
                         c.emit_force(&namespace);
 
                         c.emit_constant(name.as_str().into(), &span);
-                        c.push_op(OpCode::OpAttrsSelect, &span);
+                        c.push_op(Op::AttrsSelect, &span);
                     })
                 }
 
@@ -621,7 +632,8 @@ impl Compiler<'_, '_> {
             if self.scope()[idx].needs_finaliser {
                 let stack_idx = self.scope().stack_index(idx);
                 let span = self.scope()[idx].span;
-                self.push_op(OpCode::OpFinalise(stack_idx), &span);
+                self.push_op(Op::Finalise, &OrEntireFile(span));
+                self.push_uvarint(stack_idx.0 as u64)
             }
         }
     }
@@ -656,11 +668,8 @@ impl Compiler<'_, '_> {
         self.bind_values(bindings);
 
         if kind.is_attrs() {
-            self.push_op(OpCode::OpAttrs(Count(count)), node);
-        }
-
-        if count == 0 {
-            self.unthunk();
+            self.push_op(Op::Attrs, node);
+            self.push_uvarint(count as u64);
         }
     }
 
@@ -686,7 +695,7 @@ impl Compiler<'_, '_> {
         self.scope_mut().end_scope();
 
         self.emit_constant("body".into(), node);
-        self.push_op(OpCode::OpAttrsSelect, node);
+        self.push_op(Op::AttrsSelect, node);
     }
 
     /// Is the given identifier defined *by the user* in any current scope?
@@ -707,8 +716,9 @@ impl Compiler<'_, '_> {
         match self.scope_mut().resolve_local(ident) {
             LocalPosition::Unknown => {
                 // Are we possibly dealing with an upvalue?
-                if let Some(idx) = self.resolve_upvalue(self.contexts.len() - 1, ident, node) {
-                    self.push_op(OpCode::OpGetUpvalue(idx), node);
+                if let Some(idx) = self.resolve_upvalue(self.contexts.len() - 1, ident) {
+                    self.push_op(Op::GetUpvalue, node);
+                    self.push_uvarint(idx.0 as u64);
                     return;
                 }
 
@@ -731,7 +741,7 @@ impl Compiler<'_, '_> {
                     self.thunk(slot, node, |c, _| {
                         c.context_mut().captures_with_stack = true;
                         c.emit_constant(ident.into(), node);
-                        c.push_op(OpCode::OpResolveWith, node);
+                        c.push_op(Op::ResolveWith, node);
                     });
                     return;
                 }
@@ -742,18 +752,17 @@ impl Compiler<'_, '_> {
 
             LocalPosition::Known(idx) => {
                 let stack_idx = self.scope().stack_index(idx);
-                self.push_op(OpCode::OpGetLocal(stack_idx), node);
+                self.push_op(Op::GetLocal, node);
+                self.push_uvarint(stack_idx.0 as u64);
             }
 
             // This identifier is referring to a value from the same scope which
             // is not yet defined. This identifier access must be thunked.
             LocalPosition::Recursive(idx) => self.thunk(slot, node, move |compiler, _| {
-                let upvalue_idx = compiler.add_upvalue(
-                    compiler.contexts.len() - 1,
-                    node,
-                    UpvalueKind::Local(idx),
-                );
-                compiler.push_op(OpCode::OpGetUpvalue(upvalue_idx), node);
+                let upvalue_idx =
+                    compiler.add_upvalue(compiler.contexts.len() - 1, UpvalueKind::Local(idx));
+                compiler.push_op(Op::GetUpvalue, node);
+                compiler.push_uvarint(upvalue_idx.0 as u64);
             }),
         };
     }
@@ -766,12 +775,7 @@ impl Compiler<'_, '_> {
 
 /// Private compiler helpers related to bindings.
 impl Compiler<'_, '_> {
-    fn resolve_upvalue<N: ToSpan>(
-        &mut self,
-        ctx_idx: usize,
-        name: &str,
-        node: &N,
-    ) -> Option<UpvalueIdx> {
+    fn resolve_upvalue(&mut self, ctx_idx: usize, name: &str) -> Option<UpvalueIdx> {
         if ctx_idx == 0 {
             // There can not be any upvalue at the outermost context.
             return None;
@@ -784,7 +788,7 @@ impl Compiler<'_, '_> {
             // stack (i.e. in the right position) *during* their runtime
             // construction
             LocalPosition::Known(idx) | LocalPosition::Recursive(idx) => {
-                return Some(self.add_upvalue(ctx_idx, node, UpvalueKind::Local(idx)))
+                return Some(self.add_upvalue(ctx_idx, UpvalueKind::Local(idx)))
             }
 
             LocalPosition::Unknown => { /* continue below */ }
@@ -792,19 +796,14 @@ impl Compiler<'_, '_> {
 
         // If the upvalue comes from even further up, we need to recurse to make
         // sure that the upvalues are created at each level.
-        if let Some(idx) = self.resolve_upvalue(ctx_idx - 1, name, node) {
-            return Some(self.add_upvalue(ctx_idx, node, UpvalueKind::Upvalue(idx)));
+        if let Some(idx) = self.resolve_upvalue(ctx_idx - 1, name) {
+            return Some(self.add_upvalue(ctx_idx, UpvalueKind::Upvalue(idx)));
         }
 
         None
     }
 
-    fn add_upvalue<N: ToSpan>(
-        &mut self,
-        ctx_idx: usize,
-        node: &N,
-        kind: UpvalueKind,
-    ) -> UpvalueIdx {
+    fn add_upvalue(&mut self, ctx_idx: usize, kind: UpvalueKind) -> UpvalueIdx {
         // If there is already an upvalue closing over the specified index,
         // retrieve that instead.
         for (idx, existing) in self.contexts[ctx_idx].scope.upvalues.iter().enumerate() {
@@ -813,11 +812,7 @@ impl Compiler<'_, '_> {
             }
         }
 
-        let span = self.span_for(node);
-        self.contexts[ctx_idx]
-            .scope
-            .upvalues
-            .push(Upvalue { kind, span });
+        self.contexts[ctx_idx].scope.upvalues.push(Upvalue { kind });
 
         let idx = UpvalueIdx(self.contexts[ctx_idx].lambda.upvalue_count);
         self.contexts[ctx_idx].lambda.upvalue_count += 1;
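
For orientation, the recursion in `resolve_upvalue`/`add_upvalue` (now free of span bookkeeping) can be sketched standalone: each context either finds the name in the immediately enclosing scope or asks the next context up, recording an upvalue link at every level. Toy scopes below, not the compiler's real `Scope` type:

    #[derive(Clone, Copy, Debug, PartialEq)]
    enum Upvalue {
        Local(usize),   // slot in the enclosing scope's locals
        Upvalue(usize), // slot in the enclosing context's upvalue list
    }

    struct Ctx {
        locals: Vec<&'static str>,
        upvalues: Vec<Upvalue>,
    }

    fn resolve_upvalue(ctxs: &mut Vec<Ctx>, ctx_idx: usize, name: &str) -> Option<usize> {
        if ctx_idx == 0 {
            return None; // no upvalues at the outermost context
        }
        // Known in the immediately enclosing scope?
        if let Some(local) = ctxs[ctx_idx - 1].locals.iter().position(|l| *l == name) {
            return Some(add_upvalue(ctxs, ctx_idx, Upvalue::Local(local)));
        }
        // Otherwise recurse, threading an upvalue link through each level.
        resolve_upvalue(ctxs, ctx_idx - 1, name)
            .map(|idx| add_upvalue(ctxs, ctx_idx, Upvalue::Upvalue(idx)))
    }

    fn add_upvalue(ctxs: &mut Vec<Ctx>, ctx_idx: usize, kind: Upvalue) -> usize {
        // Reuse an existing upvalue closing over the same slot, as the
        // compiler does.
        if let Some(idx) = ctxs[ctx_idx].upvalues.iter().position(|u| *u == kind) {
            return idx;
        }
        ctxs[ctx_idx].upvalues.push(kind);
        ctxs[ctx_idx].upvalues.len() - 1
    }

    fn main() {
        let mut ctxs = vec![
            Ctx { locals: vec!["x"], upvalues: vec![] },
            Ctx { locals: vec![], upvalues: vec![] },
            Ctx { locals: vec![], upvalues: vec![] },
        ];
        // Resolving "x" two levels down threads an upvalue through each context.
        assert_eq!(resolve_upvalue(&mut ctxs, 2, "x"), Some(0));
        assert_eq!(ctxs[1].upvalues, vec![Upvalue::Local(0)]);
        assert_eq!(ctxs[2].upvalues, vec![Upvalue::Upvalue(0)]);
    }
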
diff --git a/tvix/eval/src/compiler/import.rs b/tvix/eval/src/compiler/import.rs
index 9036eec81731..862e792df566 100644
--- a/tvix/eval/src/compiler/import.rs
+++ b/tvix/eval/src/compiler/import.rs
@@ -65,6 +65,7 @@ async fn import_impl(
         globals
             .upgrade()
             .expect("globals dropped while still in use"),
+        None,
         &source,
         &file,
         &mut NoOpObserver::default(),
diff --git a/tvix/eval/src/compiler/mod.rs b/tvix/eval/src/compiler/mod.rs
index 60c55dda27b4..33b70b87ce84 100644
--- a/tvix/eval/src/compiler/mod.rs
+++ b/tvix/eval/src/compiler/mod.rs
@@ -20,16 +20,16 @@ mod scope;
 
 use codemap::Span;
 use rnix::ast::{self, AstToken};
+use rustc_hash::FxHashMap;
 use smol_str::SmolStr;
-use std::collections::{BTreeMap, HashMap};
+use std::collections::BTreeMap;
 use std::path::{Path, PathBuf};
 use std::rc::{Rc, Weak};
 
 use crate::chunk::Chunk;
 use crate::errors::{CatchableErrorKind, Error, ErrorKind, EvalResult};
 use crate::observer::CompilerObserver;
-use crate::opcode::{CodeIdx, ConstantIdx, Count, JumpOffset, OpCode, UpvalueIdx};
-use crate::spans::LightSpan;
+use crate::opcode::{CodeIdx, Op, Position, UpvalueIdx};
 use crate::spans::ToSpan;
 use crate::value::{Closure, Formals, Lambda, NixAttrs, Thunk, Value};
 use crate::warnings::{EvalWarning, WarningKind};
@@ -45,11 +45,6 @@ pub struct CompilationOutput {
     pub lambda: Rc<Lambda>,
     pub warnings: Vec<EvalWarning>,
     pub errors: Vec<Error>,
-
-    // This field must outlive the rc::Weak reference which breaks the
-    // builtins -> import -> builtins reference cycle. For this
-    // reason, it must be passed to the VM.
-    pub globals: Rc<GlobalsMap>,
 }
 
 /// Represents the lambda currently being compiled.
@@ -57,7 +52,6 @@ struct LambdaCtx {
     lambda: Lambda,
     scope: Scope,
     captures_with_stack: bool,
-    unthunk: bool,
 }
 
 impl LambdaCtx {
@@ -66,7 +60,6 @@ impl LambdaCtx {
             lambda: Lambda::default(),
             scope: Default::default(),
             captures_with_stack: false,
-            unthunk: false,
         }
     }
 
@@ -75,7 +68,6 @@ impl LambdaCtx {
             lambda: Lambda::default(),
             scope: self.scope.inherit(),
             captures_with_stack: false,
-            unthunk: false,
         }
     }
 }
@@ -123,7 +115,7 @@ impl TrackedFormal {
 
 /// The map of globally available functions and other values that
 /// should implicitly be resolvable in the global scope.
-pub(crate) type GlobalsMap = HashMap<&'static str, Value>;
+pub type GlobalsMap = FxHashMap<&'static str, Value>;
 
 /// Set of builtins that (if they exist) should be made available in
 /// the global scope, meaning that they can be accessed not just
@@ -193,6 +185,7 @@ impl<'source, 'observer> Compiler<'source, 'observer> {
     pub(crate) fn new(
         location: Option<PathBuf>,
         globals: Rc<GlobalsMap>,
+        env: Option<&FxHashMap<SmolStr, Value>>,
         source: &'source SourceCode,
         file: &'source codemap::File,
         observer: &'observer mut dyn CompilerObserver,
@@ -228,7 +221,7 @@ impl<'source, 'observer> Compiler<'source, 'observer> {
         #[cfg(not(target_arch = "wasm32"))]
         debug_assert!(root_dir.is_absolute());
 
-        Ok(Self {
+        let mut compiler = Self {
             root_dir,
             source,
             file,
@@ -238,7 +231,13 @@ impl<'source, 'observer> Compiler<'source, 'observer> {
             warnings: vec![],
             errors: vec![],
             dead_scope: 0,
-        })
+        };
+
+        if let Some(env) = env {
+            compiler.compile_env(env);
+        }
+
+        Ok(compiler)
     }
 }
 
@@ -268,13 +267,37 @@ impl Compiler<'_, '_> {
 
     /// Push a single instruction to the current bytecode chunk and
     /// track the source span from which it was compiled.
-    fn push_op<T: ToSpan>(&mut self, data: OpCode, node: &T) -> CodeIdx {
+    fn push_op<T: ToSpan>(&mut self, data: Op, node: &T) -> CodeIdx {
         if self.dead_scope > 0 {
             return CodeIdx(0);
         }
 
         let span = self.span_for(node);
-        self.chunk().push_op(data, span)
+        CodeIdx(self.chunk().push_op(data, span))
+    }
+
+    fn push_u8(&mut self, data: u8) {
+        if self.dead_scope > 0 {
+            return;
+        }
+
+        self.chunk().code.push(data);
+    }
+
+    fn push_uvarint(&mut self, data: u64) {
+        if self.dead_scope > 0 {
+            return;
+        }
+
+        self.chunk().push_uvarint(data);
+    }
+
+    fn push_u16(&mut self, data: u16) {
+        if self.dead_scope > 0 {
+            return;
+        }
+
+        self.chunk().push_u16(data);
     }
 
     /// Emit a single constant to the current bytecode chunk and track
@@ -285,7 +308,8 @@ impl Compiler<'_, '_> {
         }
 
         let idx = self.chunk().push_constant(value);
-        self.push_op(OpCode::OpConstant(idx), node);
+        self.push_op(Op::Constant, node);
+        self.push_uvarint(idx.0 as u64);
     }
 }
 
@@ -398,7 +422,7 @@ impl Compiler<'_, '_> {
                 Value::UnresolvedPath(Box::new(home_relative_path.into())),
                 node,
             );
-            self.push_op(OpCode::OpResolveHomePath, node);
+            self.push_op(Op::ResolveHomePath, node);
             return;
         } else if raw_path.starts_with('<') {
             // TODO: decide what to do with findFile
@@ -414,7 +438,7 @@ impl Compiler<'_, '_> {
             // Make a thunk to resolve the path (without using `findFile`, at least for now?)
             return self.thunk(slot, node, move |c, _| {
                 c.emit_constant(Value::UnresolvedPath(Box::new(path.into())), node);
-                c.push_op(OpCode::OpFindFile, node);
+                c.push_op(Op::FindFile, node);
             });
         } else {
             let mut buf = self.root_dir.clone();
@@ -450,13 +474,15 @@ impl Compiler<'_, '_> {
                 ast::InterpolPart::Interpolation(ipol) => {
                     self.compile(slot, ipol.expr().unwrap());
                     // implicitly forces as well
-                    self.push_op(
-                        OpCode::OpCoerceToString(CoercionKind {
-                            strong: false,
-                            import_paths: true,
-                        }),
-                        ipol,
-                    );
+                    self.push_op(Op::CoerceToString, ipol);
+
+                    let encoded: u8 = CoercionKind {
+                        strong: false,
+                        import_paths: true,
+                    }
+                    .into();
+
+                    self.push_u8(encoded);
                 }
 
                 ast::InterpolPart::Literal(lit) => {
@@ -466,7 +492,8 @@ impl Compiler<'_, '_> {
         }
 
         if parts.len() != 1 {
-            self.push_op(OpCode::OpInterpolate(Count(parts.len())), parent_node);
+            self.push_op(Op::Interpolate, parent_node);
+            self.push_uvarint(parts.len() as u64);
         }
     }
 
@@ -492,8 +519,8 @@ impl Compiler<'_, '_> {
         self.emit_force(op);
 
         let opcode = match op.operator().unwrap() {
-            ast::UnaryOpKind::Invert => OpCode::OpInvert,
-            ast::UnaryOpKind::Negate => OpCode::OpNegate,
+            ast::UnaryOpKind::Invert => Op::Invert,
+            ast::UnaryOpKind::Negate => Op::Negate,
         };
 
         self.push_op(opcode, op);
@@ -524,21 +551,21 @@ impl Compiler<'_, '_> {
         self.emit_force(&op.rhs().unwrap());
 
         match op.operator().unwrap() {
-            BinOpKind::Add => self.push_op(OpCode::OpAdd, op),
-            BinOpKind::Sub => self.push_op(OpCode::OpSub, op),
-            BinOpKind::Mul => self.push_op(OpCode::OpMul, op),
-            BinOpKind::Div => self.push_op(OpCode::OpDiv, op),
-            BinOpKind::Update => self.push_op(OpCode::OpAttrsUpdate, op),
-            BinOpKind::Equal => self.push_op(OpCode::OpEqual, op),
-            BinOpKind::Less => self.push_op(OpCode::OpLess, op),
-            BinOpKind::LessOrEq => self.push_op(OpCode::OpLessOrEq, op),
-            BinOpKind::More => self.push_op(OpCode::OpMore, op),
-            BinOpKind::MoreOrEq => self.push_op(OpCode::OpMoreOrEq, op),
-            BinOpKind::Concat => self.push_op(OpCode::OpConcat, op),
+            BinOpKind::Add => self.push_op(Op::Add, op),
+            BinOpKind::Sub => self.push_op(Op::Sub, op),
+            BinOpKind::Mul => self.push_op(Op::Mul, op),
+            BinOpKind::Div => self.push_op(Op::Div, op),
+            BinOpKind::Update => self.push_op(Op::AttrsUpdate, op),
+            BinOpKind::Equal => self.push_op(Op::Equal, op),
+            BinOpKind::Less => self.push_op(Op::Less, op),
+            BinOpKind::LessOrEq => self.push_op(Op::LessOrEq, op),
+            BinOpKind::More => self.push_op(Op::More, op),
+            BinOpKind::MoreOrEq => self.push_op(Op::MoreOrEq, op),
+            BinOpKind::Concat => self.push_op(Op::Concat, op),
 
             BinOpKind::NotEqual => {
-                self.push_op(OpCode::OpEqual, op);
-                self.push_op(OpCode::OpInvert, op)
+                self.push_op(Op::Equal, op);
+                self.push_op(Op::Invert, op)
             }
 
             // Handled by separate branch above.
@@ -559,20 +586,22 @@ impl Compiler<'_, '_> {
         self.compile(slot, node.lhs().unwrap());
         self.emit_force(&node.lhs().unwrap());
 
-        let throw_idx = self.push_op(OpCode::OpJumpIfCatchable(JumpOffset(0)), node);
+        let throw_idx = self.push_op(Op::JumpIfCatchable, node);
+        self.push_u16(0);
         // If this value is false, jump over the right-hand side - the
         // whole expression is false.
-        let end_idx = self.push_op(OpCode::OpJumpIfFalse(JumpOffset(0)), node);
+        let end_idx = self.push_op(Op::JumpIfFalse, node);
+        self.push_u16(0);
 
         // Otherwise, remove the previous value and leave the
         // right-hand side on the stack. Its result is now the value
         // of the whole expression.
-        self.push_op(OpCode::OpPop, node);
+        self.push_op(Op::Pop, node);
         self.compile(slot, node.rhs().unwrap());
         self.emit_force(&node.rhs().unwrap());
 
         self.patch_jump(end_idx);
-        self.push_op(OpCode::OpAssertBool, node);
+        self.push_op(Op::AssertBool, node);
         self.patch_jump(throw_idx);
     }
 
@@ -587,16 +616,18 @@ impl Compiler<'_, '_> {
         self.compile(slot, node.lhs().unwrap());
         self.emit_force(&node.lhs().unwrap());
 
-        let throw_idx = self.push_op(OpCode::OpJumpIfCatchable(JumpOffset(0)), node);
+        let throw_idx = self.push_op(Op::JumpIfCatchable, node);
+        self.push_u16(0);
         // Opposite of above: If this value is **true**, we can
         // short-circuit the right-hand side.
-        let end_idx = self.push_op(OpCode::OpJumpIfTrue(JumpOffset(0)), node);
-        self.push_op(OpCode::OpPop, node);
+        let end_idx = self.push_op(Op::JumpIfTrue, node);
+        self.push_u16(0);
+        self.push_op(Op::Pop, node);
         self.compile(slot, node.rhs().unwrap());
         self.emit_force(&node.rhs().unwrap());
 
         self.patch_jump(end_idx);
-        self.push_op(OpCode::OpAssertBool, node);
+        self.push_op(Op::AssertBool, node);
         self.patch_jump(throw_idx);
     }
 
@@ -610,17 +641,20 @@ impl Compiler<'_, '_> {
         // Leave left-hand side value on the stack and invert it.
         self.compile(slot, node.lhs().unwrap());
         self.emit_force(&node.lhs().unwrap());
-        let throw_idx = self.push_op(OpCode::OpJumpIfCatchable(JumpOffset(0)), node);
-        self.push_op(OpCode::OpInvert, node);
+        let throw_idx = self.push_op(Op::JumpIfCatchable, node);
+        self.push_u16(0);
+        self.push_op(Op::Invert, node);
 
         // Exactly as `||` (because `a -> b` = `!a || b`).
-        let end_idx = self.push_op(OpCode::OpJumpIfTrue(JumpOffset(0)), node);
-        self.push_op(OpCode::OpPop, node);
+        let end_idx = self.push_op(Op::JumpIfTrue, node);
+        self.push_u16(0);
+
+        self.push_op(Op::Pop, node);
         self.compile(slot, node.rhs().unwrap());
         self.emit_force(&node.rhs().unwrap());
 
         self.patch_jump(end_idx);
-        self.push_op(OpCode::OpAssertBool, node);
+        self.push_op(Op::AssertBool, node);
         self.patch_jump(throw_idx);
     }
 
@@ -655,11 +689,8 @@ impl Compiler<'_, '_> {
             self.scope_mut().mark_initialised(item_slot);
         }
 
-        if count == 0 {
-            self.unthunk();
-        }
-
-        self.push_op(OpCode::OpList(Count(count)), node);
+        self.push_op(Op::List, node);
+        self.push_uvarint(count as u64);
         self.scope_mut().end_scope();
     }
 
@@ -688,7 +719,7 @@ impl Compiler<'_, '_> {
         // next nested element, for all fragments except the last one.
         for (count, fragment) in node.attrpath().unwrap().attrs().enumerate() {
             if count > 0 {
-                self.push_op(OpCode::OpAttrsTrySelect, &fragment);
+                self.push_op(Op::AttrsTrySelect, &fragment);
                 self.emit_force(&fragment);
             }
 
@@ -697,7 +728,7 @@ impl Compiler<'_, '_> {
 
         // After the last fragment, emit the actual instruction that
         // leaves a boolean on the stack.
-        self.push_op(OpCode::OpHasAttr, node);
+        self.push_op(Op::HasAttr, node);
     }
 
     /// When compiling select or select_or expressions, an optimisation is
@@ -721,8 +752,9 @@ impl Compiler<'_, '_> {
         // set that is lacking a key, because that thunk is never
         // evaluated). If anything is missing, just move on. We may
         // want to emit warnings here in the future.
-        if let Some(OpCode::OpConstant(ConstantIdx(idx))) = self.chunk().code.last().cloned() {
-            let constant = &mut self.chunk().constants[idx];
+        if let Some((Op::Constant, op_idx)) = self.chunk().last_op() {
+            let (idx, _) = self.chunk().read_uvarint(op_idx + 1);
+            let constant = &mut self.chunk().constants[idx as usize];
             if let Value::Attrs(attrs) = constant {
                 let mut path_iter = path.attrs();
 
@@ -734,10 +766,6 @@ impl Compiler<'_, '_> {
                     if let Some(ident) = expr_static_attr_str(&attr) {
                         if let Some(selected_value) = attrs.select(ident.as_bytes()) {
                             *constant = selected_value.clone();
-
-                            // If this worked, we can unthunk the current thunk.
-                            self.unthunk();
-
                             return true;
                         }
                     }
@@ -771,7 +799,7 @@ impl Compiler<'_, '_> {
             self.emit_force(&set);
 
             self.compile_attr(slot, &fragment);
-            self.push_op(OpCode::OpAttrsSelect, &fragment);
+            self.push_op(Op::AttrsSelect, &fragment);
         }
     }
 
@@ -821,11 +849,13 @@ impl Compiler<'_, '_> {
         for fragment in path.attrs() {
             self.emit_force(&fragment);
             self.compile_attr(slot, &fragment.clone());
-            self.push_op(OpCode::OpAttrsTrySelect, &fragment);
-            jumps.push(self.push_op(OpCode::OpJumpIfNotFound(JumpOffset(0)), &fragment));
+            self.push_op(Op::AttrsTrySelect, &fragment);
+            jumps.push(self.push_op(Op::JumpIfNotFound, &fragment));
+            self.push_u16(0);
         }
 
-        let final_jump = self.push_op(OpCode::OpJump(JumpOffset(0)), &path);
+        let final_jump = self.push_op(Op::Jump, &path);
+        self.push_u16(0);
 
         for jump in jumps {
             self.patch_jump(jump);
@@ -853,17 +883,22 @@ impl Compiler<'_, '_> {
         // Compile the assertion condition to leave its value on the stack.
         self.compile(slot, node.condition().unwrap());
         self.emit_force(&node.condition().unwrap());
-        let throw_idx = self.push_op(OpCode::OpJumpIfCatchable(JumpOffset(0)), node);
-        let then_idx = self.push_op(OpCode::OpJumpIfFalse(JumpOffset(0)), node);
 
-        self.push_op(OpCode::OpPop, node);
+        let throw_idx = self.push_op(Op::JumpIfCatchable, node);
+        self.push_u16(0);
+
+        let then_idx = self.push_op(Op::JumpIfFalse, node);
+        self.push_u16(0);
+
+        self.push_op(Op::Pop, node);
         self.compile(slot, node.body().unwrap());
 
-        let else_idx = self.push_op(OpCode::OpJump(JumpOffset(0)), node);
+        let else_idx = self.push_op(Op::Jump, node);
+        self.push_u16(0);
 
         self.patch_jump(then_idx);
-        self.push_op(OpCode::OpPop, node);
-        self.push_op(OpCode::OpAssertFail, &node.condition().unwrap());
+        self.push_op(Op::Pop, node);
+        self.push_op(Op::AssertFail, &node.condition().unwrap());
 
         self.patch_jump(else_idx);
         self.patch_jump(throw_idx);
@@ -885,22 +920,20 @@ impl Compiler<'_, '_> {
         self.compile(slot, node.condition().unwrap());
         self.emit_force(&node.condition().unwrap());
 
-        let throw_idx = self.push_op(
-            OpCode::OpJumpIfCatchable(JumpOffset(0)),
-            &node.condition().unwrap(),
-        );
-        let then_idx = self.push_op(
-            OpCode::OpJumpIfFalse(JumpOffset(0)),
-            &node.condition().unwrap(),
-        );
+        let throw_idx = self.push_op(Op::JumpIfCatchable, &node.condition().unwrap());
+        self.push_u16(0);
 
-        self.push_op(OpCode::OpPop, node); // discard condition value
+        let then_idx = self.push_op(Op::JumpIfFalse, &node.condition().unwrap());
+        self.push_u16(0);
+
+        self.push_op(Op::Pop, node); // discard condition value
         self.compile(slot, node.body().unwrap());
 
-        let else_idx = self.push_op(OpCode::OpJump(JumpOffset(0)), node);
+        let else_idx = self.push_op(Op::Jump, node);
+        self.push_u16(0);
 
         self.patch_jump(then_idx); // patch jump *to* else_body
-        self.push_op(OpCode::OpPop, node); // discard condition value
+        self.push_op(Op::Pop, node); // discard condition value
         self.compile(slot, node.else_body().unwrap());
 
         self.patch_jump(else_idx); // patch jump *over* else body
@@ -929,11 +962,12 @@ impl Compiler<'_, '_> {
 
         self.scope_mut().push_with();
 
-        self.push_op(OpCode::OpPushWith(with_idx), &node.namespace().unwrap());
+        self.push_op(Op::PushWith, &node.namespace().unwrap());
+        self.push_uvarint(with_idx.0 as u64);
 
         self.compile(slot, node.body().unwrap());
 
-        self.push_op(OpCode::OpPopWith, node);
+        self.push_op(Op::PopWith, node);
         self.scope_mut().pop_with();
         self.cleanup_scope(node);
     }
@@ -993,13 +1027,15 @@ impl Compiler<'_, '_> {
         // At call time, the attribute set is already at the top of the stack.
         self.scope_mut().mark_initialised(set_idx);
         self.emit_force(pattern);
-        let throw_idx = self.push_op(OpCode::OpJumpIfCatchable(JumpOffset(0)), pattern);
+        let throw_idx = self.push_op(Op::JumpIfCatchable, pattern);
+        self.push_u16(0);
+
         // Evaluation fails on a type error, even if the argument(s) are unused.
-        self.push_op(OpCode::OpAssertAttrs, pattern);
+        self.push_op(Op::AssertAttrs, pattern);
 
         let ellipsis = pattern.ellipsis_token().is_some();
         if !ellipsis {
-            self.push_op(OpCode::OpValidateClosedFormals, pattern);
+            self.push_op(Op::ValidateClosedFormals, pattern);
         }
 
         // Similar to `let ... in ...`, we now do multiple passes over
@@ -1039,7 +1075,8 @@ impl Compiler<'_, '_> {
         // attempt to select from it.
         let stack_idx = self.scope().stack_index(set_idx);
         for tracked_formal in entries.iter() {
-            self.push_op(OpCode::OpGetLocal(stack_idx), pattern);
+            self.push_op(Op::GetLocal, pattern);
+            self.push_uvarint(stack_idx.0 as u64);
             self.emit_literal_ident(&tracked_formal.pattern_entry().ident().unwrap());
 
             let idx = tracked_formal.local_idx();
@@ -1068,14 +1105,14 @@ impl Compiler<'_, '_> {
                     // we only know better after compiling the default expression, so
                     // avoiding unnecessary locals would mean we'd need to modify the chunk
                     // after the fact.
-                    self.push_op(OpCode::OpAttrsTrySelect, &pattern_entry.ident().unwrap());
-                    let jump_to_default =
-                        self.push_op(OpCode::OpJumpIfNotFound(JumpOffset(0)), default_expr);
+                    self.push_op(Op::AttrsTrySelect, &pattern_entry.ident().unwrap());
+                    let jump_to_default = self.push_op(Op::JumpIfNotFound, default_expr);
+                    self.push_u16(0);
 
                     self.emit_constant(Value::FinaliseRequest(false), default_expr);
 
-                    let jump_over_default =
-                        self.push_op(OpCode::OpJump(JumpOffset(0)), default_expr);
+                    let jump_over_default = self.push_op(Op::Jump, default_expr);
+                    self.push_u16(0);
 
                     self.patch_jump(jump_to_default);
 
@@ -1087,7 +1124,7 @@ impl Compiler<'_, '_> {
                     self.patch_jump(jump_over_default);
                 }
                 TrackedFormal::NoDefault { pattern_entry, .. } => {
-                    self.push_op(OpCode::OpAttrsSelect, &pattern_entry.ident().unwrap());
+                    self.push_op(Op::AttrsSelect, &pattern_entry.ident().unwrap());
                 }
             }
 
@@ -1111,23 +1148,16 @@ impl Compiler<'_, '_> {
                         let finalise_request_stack_idx = self.scope().stack_index(*finalise_request_idx);
 
                         // TODO(sterni): better spans
-                        self.push_op(
-                            OpCode::OpGetLocal(finalise_request_stack_idx),
-                            pattern
-                        );
+                        self.push_op(Op::GetLocal, pattern);
+                        self.push_uvarint(finalise_request_stack_idx.0 as u64);
                         let jump_over_finalise =
-                            self.push_op(
-                                OpCode::OpJumpIfNoFinaliseRequest(
-                                    JumpOffset(0)),
-                                pattern
-                            );
-                        self.push_op(
-                            OpCode::OpFinalise(stack_idx),
-                            pattern,
-                        );
+                            self.push_op(Op::JumpIfNoFinaliseRequest, pattern);
+                        self.push_u16(0);
+                        self.push_op(Op::Finalise, pattern);
+                        self.push_uvarint(stack_idx.0 as u64);
                         self.patch_jump(jump_over_finalise);
                         // Get rid of finaliser request value on the stack
-                        self.push_op(OpCode::OpPop, pattern);
+                        self.push_op(Op::Pop, pattern);
                     }
                 }
             }
@@ -1186,12 +1216,6 @@ impl Compiler<'_, '_> {
         })
     }
 
-    /// Mark the current thunk as redundant, i.e. possible to merge directly
-    /// into its parent lambda context without affecting runtime behaviour.
-    fn unthunk(&mut self) {
-        self.context_mut().unthunk = true;
-    }
-
     /// Compile an expression into a runtime closure or thunk
     fn compile_lambda_or_thunk<N, F>(
         &mut self,
@@ -1220,31 +1244,15 @@ impl Compiler<'_, '_> {
             self.patch_jump(throw_idx);
         }
 
-        // TODO: determine and insert enclosing name, if available.
-
         // Pop the lambda context back off, and emit the finished
         // lambda as a constant.
         let mut compiled = self.contexts.pop().unwrap();
 
-        // The compiler might have decided to unthunk, i.e. raise the compiled
-        // code to the parent context. In that case we do so and return right
-        // away.
-        if compiled.unthunk && is_suspended_thunk {
-            self.chunk().extend(compiled.lambda.chunk);
-            return;
-        }
-
         // Emit an instruction to inform the VM that the chunk has ended.
         compiled
             .lambda
             .chunk
-            .push_op(OpCode::OpReturn, self.span_for(node));
-
-        // Capturing the with stack counts as an upvalue, as it is
-        // emitted as an upvalue data instruction.
-        if compiled.captures_with_stack {
-            compiled.lambda.upvalue_count += 1;
-        }
+            .push_op(Op::Return, self.span_for(node));
 
         let lambda = Rc::new(compiled.lambda);
         if is_suspended_thunk {
@@ -1254,10 +1262,10 @@ impl Compiler<'_, '_> {
         }
 
         // If no upvalues are captured, emit directly and move on.
-        if lambda.upvalue_count == 0 {
+        if lambda.upvalue_count == 0 && !compiled.captures_with_stack {
             self.emit_constant(
                 if is_suspended_thunk {
-                    Value::Thunk(Thunk::new_suspended(lambda, LightSpan::new_actual(span)))
+                    Value::Thunk(Thunk::new_suspended(lambda, span))
                 } else {
                     Value::Closure(Rc::new(Closure::new(lambda)))
                 },
@@ -1274,12 +1282,13 @@ impl Compiler<'_, '_> {
 
         let code_idx = self.push_op(
             if is_suspended_thunk {
-                OpCode::OpThunkSuspended(blueprint_idx)
+                Op::ThunkSuspended
             } else {
-                OpCode::OpThunkClosure(blueprint_idx)
+                Op::ThunkClosure
             },
             node,
         );
+        self.push_uvarint(blueprint_idx.0 as u64);
 
         self.emit_upvalue_data(
             outer_slot,
@@ -1290,18 +1299,21 @@ impl Compiler<'_, '_> {
 
         if !is_suspended_thunk && !self.scope()[outer_slot].needs_finaliser {
             if !self.scope()[outer_slot].must_thunk {
-                // The closure has upvalues, but is not recursive.  Therefore no thunk is required,
-                // which saves us the overhead of Rc<RefCell<>>
-                self.chunk()[code_idx] = OpCode::OpClosure(blueprint_idx);
+                // The closure has upvalues, but is not recursive. Therefore no
+                // thunk is required, which saves us the overhead of
+                // Rc<RefCell<>>
+                self.chunk().code[code_idx.0] = Op::Closure as u8;
             } else {
-                // This case occurs when a closure has upvalue-references to itself but does not need a
-                // finaliser.  Since no OpFinalise will be emitted later on we synthesize one here.
-                // It is needed here only to set [`Closure::is_finalised`] which is used for sanity checks.
+                // This case occurs when a closure has upvalue-references to
+                // itself but does not need a finaliser. Since no OpFinalise
+                // will be emitted later on we synthesize one here. It is needed
+                // here only to set [`Closure::is_finalised`] which is used for
+                // sanity checks.
                 #[cfg(debug_assertions)]
-                self.push_op(
-                    OpCode::OpFinalise(self.scope().stack_index(outer_slot)),
-                    &self.span_for(node),
-                );
+                {
+                    self.push_op(Op::Finalise, &self.span_for(node));
+                    self.push_uvarint(self.scope().stack_index(outer_slot).0 as u64);
+                }
             }
         }
     }
@@ -1314,7 +1326,7 @@ impl Compiler<'_, '_> {
         self.compile(slot, node.argument().unwrap());
         self.compile(slot, node.lambda().unwrap());
         self.emit_force(&node.lambda().unwrap());
-        self.push_op(OpCode::OpCall, node);
+        self.push_op(Op::Call, node);
     }
 
     /// Emit the data instructions that the runtime needs to correctly
@@ -1322,10 +1334,18 @@ impl Compiler<'_, '_> {
     fn emit_upvalue_data<T: ToSpan>(
         &mut self,
         slot: LocalIdx,
-        node: &T,
+        _: &T, // TODO
         upvalues: Vec<Upvalue>,
         capture_with: bool,
     ) {
+        // Push the count of upvalues that follow, with one bit set to
+        // indicate whether the with stack needs to be captured.
+        let mut count = (upvalues.len() as u64) << 1;
+        if capture_with {
+            count |= 1;
+        }
+        self.push_uvarint(count);
+
         for upvalue in upvalues {
             match upvalue.kind {
                 UpvalueKind::Local(idx) => {
@@ -1335,27 +1355,22 @@ impl Compiler<'_, '_> {
                     // If the target is not yet initialised, we need to defer
                     // the local access
                     if !target.initialised {
-                        self.push_op(OpCode::DataDeferredLocal(stack_idx), &upvalue.span);
+                        self.push_uvarint(Position::deferred_local(stack_idx).0);
                         self.scope_mut().mark_needs_finaliser(slot);
                     } else {
                         // a self-reference
                         if slot == idx {
                             self.scope_mut().mark_must_thunk(slot);
                         }
-                        self.push_op(OpCode::DataStackIdx(stack_idx), &upvalue.span);
+                        self.push_uvarint(Position::stack_index(stack_idx).0);
                     }
                 }
 
                 UpvalueKind::Upvalue(idx) => {
-                    self.push_op(OpCode::DataUpvalueIdx(idx), &upvalue.span);
+                    self.push_uvarint(Position::upvalue_index(idx).0);
                 }
             };
         }
-
-        if capture_with {
-            // TODO(tazjin): probably better to emit span for the ident that caused this
-            self.push_op(OpCode::DataCaptureWith, node);
-        }
     }
 
     /// Emit the literal string value of an identifier. Required for
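The packed header pushed at the top of emit_upvalue_data above replaces the three removed data opcodes (DataStackIdx/DataDeferredLocal via Position, DataCaptureWith via the flag bit). A minimal sketch of just the count/flag packing, assuming the shift-and-flag layout shown in the hunk:

    /// Upvalue count in the high bits, with-stack flag in bit 0,
    /// mirroring `emit_upvalue_data` above.
    fn pack_upvalue_header(upvalues: usize, capture_with: bool) -> u64 {
        ((upvalues as u64) << 1) | (capture_with as u64)
    }

    /// The inverse, as the VM presumably performs after reading the uvarint.
    fn unpack_upvalue_header(header: u64) -> (usize, bool) {
        ((header >> 1) as usize, header & 1 == 1)
    }

    // pack_upvalue_header(3, true) == 0b111; unpacking yields (3, true).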
@@ -1372,20 +1387,7 @@ impl Compiler<'_, '_> {
     /// not known at the time when the jump operation itself is
     /// emitted.
     fn patch_jump(&mut self, idx: CodeIdx) {
-        let offset = JumpOffset(self.chunk().code.len() - 1 - idx.0);
-
-        match &mut self.chunk().code[idx.0] {
-            OpCode::OpJump(n)
-            | OpCode::OpJumpIfFalse(n)
-            | OpCode::OpJumpIfTrue(n)
-            | OpCode::OpJumpIfCatchable(n)
-            | OpCode::OpJumpIfNotFound(n)
-            | OpCode::OpJumpIfNoFinaliseRequest(n) => {
-                *n = offset;
-            }
-
-            op => panic!("attempted to patch unsupported op: {:?}", op),
-        }
+        self.chunk().patch_jump(idx.0);
     }
 
     /// Decrease scope depth of the current function and emit
@@ -1401,7 +1403,8 @@ impl Compiler<'_, '_> {
         }
 
         if popcount > 0 {
-            self.push_op(OpCode::OpCloseScope(Count(popcount)), node);
+            self.push_op(Op::CloseScope, node);
+            self.push_uvarint(popcount as u64);
         }
     }
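With jump offsets stored as two placeholder bytes after the opcode, patching moves into Chunk::patch_jump. A rough sketch of the byte-level rewrite it has to perform; the u16 width matches the push_u16 calls above, but the endianness and exact offset arithmetic are assumptions, not quotes from chunk.rs:

    /// Hypothetical equivalent of `Chunk::patch_jump`: `idx` addresses
    /// the jump opcode, and the two bytes after it hold the offset.
    fn patch_jump(code: &mut [u8], idx: usize) {
        // Distance from the end of the operand to the current end of
        // the chunk, i.e. where the jump should land.
        let offset = (code.len() - idx - 3) as u16;
        code[idx + 1..idx + 3].copy_from_slice(&offset.to_le_bytes());
    }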
 
@@ -1459,16 +1462,7 @@ impl Compiler<'_, '_> {
     }
 
     fn emit_force<N: ToSpan>(&mut self, node: &N) {
-        if let Some(&OpCode::OpConstant(c)) = self.chunk().last_op() {
-            if !self.chunk().get_constant(c).unwrap().is_thunk() {
-                // Optimization: Don't emit a force op for non-thunk constants, since they don't
-                // need one!
-                // TODO: this is probably doable for more ops (?)
-                return;
-            }
-        }
-
-        self.push_op(OpCode::OpForce, node);
+        self.push_op(Op::Force, node);
     }
 
     fn emit_warning<N: ToSpan>(&mut self, node: &N, kind: WarningKind) {
@@ -1549,6 +1543,7 @@ fn compile_src_builtin(
             &parsed.tree().expr().unwrap(),
             None,
             weak.upgrade().unwrap(),
+            None,
             &source,
             &file,
             &mut crate::observer::NoOpObserver {},
@@ -1565,10 +1560,7 @@ fn compile_src_builtin(
             });
         }
 
-        Ok(Value::Thunk(Thunk::new_suspended(
-            result.lambda,
-            LightSpan::Actual { span: file.span },
-        )))
+        Ok(Value::Thunk(Thunk::new_suspended(result.lambda, file.span)))
     })))
 }
 
@@ -1589,7 +1581,7 @@ pub fn prepare_globals(
     Rc::new_cyclic(Box::new(move |weak: &Weak<GlobalsMap>| {
         // First step is to construct the builtins themselves as
         // `NixAttrs`.
-        let mut builtins: GlobalsMap = HashMap::from_iter(builtins);
+        let mut builtins: GlobalsMap = FxHashMap::from_iter(builtins);
 
         // At this point, optionally insert `import` if enabled. To
         // "tie the knot" of `import` needing the full set of globals
@@ -1602,7 +1594,7 @@ pub fn prepare_globals(
 
         // Next, the actual map of globals which the compiler will use
         // to resolve identifiers is constructed.
-        let mut globals: GlobalsMap = HashMap::new();
+        let mut globals: GlobalsMap = FxHashMap::default();
 
         // builtins contain themselves (`builtins.builtins`), which we
         // can resolve by manually constructing a suspended thunk that
@@ -1655,11 +1647,12 @@ pub fn compile(
     expr: &ast::Expr,
     location: Option<PathBuf>,
     globals: Rc<GlobalsMap>,
+    env: Option<&FxHashMap<SmolStr, Value>>,
     source: &SourceCode,
     file: &codemap::File,
     observer: &mut dyn CompilerObserver,
 ) -> EvalResult<CompilationOutput> {
-    let mut c = Compiler::new(location, globals.clone(), source, file, observer)?;
+    let mut c = Compiler::new(location, globals.clone(), env, source, file, observer)?;
 
     let root_span = c.span_for(expr);
     let root_slot = c.scope_mut().declare_phantom(root_span, false);
@@ -1670,7 +1663,13 @@ pub fn compile(
     // unevaluated state (though in practice, a value *containing* a
     // thunk might be returned).
     c.emit_force(expr);
-    c.push_op(OpCode::OpReturn, &root_span);
+    if let Some(env) = env {
+        if !env.is_empty() {
+            c.push_op(Op::CloseScope, &root_span);
+            c.push_uvarint(env.len() as u64);
+        }
+    }
+    c.push_op(Op::Return, &root_span);
 
     let lambda = Rc::new(c.contexts.pop().unwrap().lambda);
     c.observer.observe_compiled_toplevel(&lambda);
@@ -1679,6 +1678,5 @@ pub fn compile(
         lambda,
         warnings: c.warnings,
         errors: c.errors,
-        globals,
     })
 }
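Throughout this file, the fixed-size OpCode variants become a raw byte stream: each Op is a single byte, jump offsets follow as two bytes (push_u16), and counts or indices follow as variable-length unsigned integers (push_uvarint / read_uvarint). A sketch of such a codec, assuming an LEB128-style encoding; the actual implementation lives in chunk.rs and may differ in detail:

    /// Append `n` in 7-bit groups, least significant first; the high
    /// bit of each byte signals that another byte follows.
    fn push_uvarint(code: &mut Vec<u8>, mut n: u64) {
        while n >= 0x80 {
            code.push((n as u8 & 0x7f) | 0x80);
            n >>= 7;
        }
        code.push(n as u8);
    }

    /// Decode a uvarint at `idx`, returning the value and the number
    /// of bytes consumed (cf. the `read_uvarint(op_idx + 1)` call above).
    fn read_uvarint(code: &[u8], idx: usize) -> (u64, usize) {
        let (mut value, mut shift, mut len) = (0u64, 0u32, 0usize);
        loop {
            let byte = code[idx + len];
            value |= ((byte & 0x7f) as u64) << shift;
            len += 1;
            if byte & 0x80 == 0 {
                return (value, len);
            }
            shift += 7;
        }
    }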
diff --git a/tvix/eval/src/compiler/scope.rs b/tvix/eval/src/compiler/scope.rs
index 892727c107c9..bb1784e67b74 100644
--- a/tvix/eval/src/compiler/scope.rs
+++ b/tvix/eval/src/compiler/scope.rs
@@ -10,10 +10,8 @@
 //! stack indices. To do this, the compiler simulates where locals
 //! will be at runtime using the data structures implemented here.
 
-use std::{
-    collections::{hash_map, HashMap},
-    ops::Index,
-};
+use rustc_hash::FxHashMap;
+use std::{collections::hash_map, ops::Index};
 
 use smol_str::SmolStr;
 
@@ -38,7 +36,7 @@ pub struct Local {
     name: LocalName,
 
     /// Source span at which this local was declared.
-    pub span: codemap::Span,
+    pub span: Option<codemap::Span>,
 
     /// Scope depth of this local.
     pub depth: usize,
@@ -73,6 +71,10 @@ impl Local {
             LocalName::Phantom => false,
         }
     }
+
+    pub fn is_used(&self) -> bool {
+        self.depth == 0 || self.used || self.is_ignored()
+    }
 }
 
 /// Represents the current position of an identifier as resolved in a scope.
@@ -103,7 +105,6 @@ pub enum UpvalueKind {
 #[derive(Clone, Debug)]
 pub struct Upvalue {
     pub kind: UpvalueKind,
-    pub span: codemap::Span,
 }
 
 /// The index of a local in the scope's local array at compile time.
@@ -164,7 +165,7 @@ pub struct Scope {
     pub upvalues: Vec<Upvalue>,
 
     /// Secondary by-name index over locals.
-    by_name: HashMap<String, ByName>,
+    by_name: FxHashMap<String, ByName>,
 
     /// How many scopes "deep" are these locals?
     scope_depth: usize,
@@ -240,7 +241,7 @@ impl Scope {
         let idx = self.locals.len();
         self.locals.push(Local {
             initialised,
-            span,
+            span: Some(span),
             name: LocalName::Phantom,
             depth: self.scope_depth,
             needs_finaliser: false,
@@ -263,7 +264,7 @@ impl Scope {
         let idx = LocalIdx(self.locals.len());
         self.locals.push(Local {
             name: LocalName::Ident(name.clone()),
-            span,
+            span: Some(span),
             depth: self.scope_depth,
             initialised: false,
             needs_finaliser: false,
@@ -286,6 +287,23 @@ impl Scope {
         (idx, shadowed)
     }
 
+    pub fn declare_constant(&mut self, name: String) -> LocalIdx {
+        let idx = LocalIdx(self.locals.len());
+        self.locals.push(Local {
+            name: LocalName::Ident(name.clone()),
+            span: None,
+            depth: 0,
+            initialised: true,
+            used: false,
+            needs_finaliser: false,
+            must_thunk: false,
+        });
+        // We don't need to worry about shadowing for constants; they are
+        // always defined at the toplevel.
+        self.by_name.insert(name, ByName::Single(idx));
+        idx
+    }
+
     /// Mark local as initialised after compiling its expression.
     pub fn mark_initialised(&mut self, idx: LocalIdx) {
         self.locals[idx.0].initialised = true;
@@ -348,8 +366,8 @@ impl Scope {
                 // lifetime, and emit a warning otherwise (unless the
                 // user explicitly chose to ignore it by prefixing the
                 // identifier with `_`)
-                if !local.used && !local.is_ignored() {
-                    unused_spans.push(local.span);
+                if !local.is_used() {
+                    unused_spans.extend(local.span);
                 }
 
                 // remove the by-name index if this was a named local
diff --git a/tvix/eval/src/errors.rs b/tvix/eval/src/errors.rs
index 652252dadfa0..ee55552c7ac5 100644
--- a/tvix/eval/src/errors.rs
+++ b/tvix/eval/src/errors.rs
@@ -10,7 +10,6 @@ use std::{fmt::Debug, fmt::Display, num::ParseIntError};
 use codemap::{File, Span};
 use codemap_diagnostic::{ColorConfig, Diagnostic, Emitter, Level, SpanLabel, SpanStyle};
 use smol_str::SmolStr;
-use xml::writer::Error as XmlError;
 
 use crate::spans::ToSpan;
 use crate::value::{CoercionKind, NixString};
@@ -110,7 +109,7 @@ pub enum ErrorKind {
     UnknownDynamicVariable(String),
 
     /// User is defining the same variable twice at the same depth.
-    VariableAlreadyDefined(Span),
+    VariableAlreadyDefined(Option<Span>),
 
     /// Attempt to call something that is not callable.
     NotCallable(&'static str),
@@ -185,8 +184,11 @@ pub enum ErrorKind {
     /// Errors converting TOML to a value
     FromTomlError(String),
 
+    /// An unexpected argument was supplied to a builtin
+    UnexpectedArgumentBuiltin(NixString),
+
     /// An unexpected argument was supplied to a function that takes formal parameters
-    UnexpectedArgument {
+    UnexpectedArgumentFormals {
         arg: NixString,
         formals_span: Span,
     },
@@ -194,9 +196,6 @@ pub enum ErrorKind {
     /// Invalid UTF-8 was encountered somewhere
     Utf8,
 
-    /// Errors while serialising to XML.
-    Xml(Rc<XmlError>),
-
     /// Variant for errors that bubble up to eval from other Tvix
     /// components.
     TvixError(Rc<dyn error::Error>),
@@ -248,7 +247,6 @@ impl error::Error for Error {
                 errors.first().map(|e| e as &dyn error::Error)
             }
             ErrorKind::IO { error, .. } => Some(error.as_ref()),
-            ErrorKind::Xml(error) => Some(error.as_ref()),
             ErrorKind::TvixError(error) => Some(error.as_ref()),
             _ => None,
         }
@@ -285,12 +283,6 @@ impl From<bstr::FromUtf8Error> for ErrorKind {
     }
 }
 
-impl From<XmlError> for ErrorKind {
-    fn from(err: XmlError) -> Self {
-        Self::Xml(Rc::new(err))
-    }
-}
-
 impl From<io::Error> for ErrorKind {
     fn from(e: io::Error) -> Self {
         ErrorKind::IO {
@@ -498,7 +490,11 @@ to a missing value in the attribute set(s) included via `with`."#,
                 write!(f, "Error converting TOML to a Nix value: {msg}")
             }
 
-            ErrorKind::UnexpectedArgument { arg, .. } => {
+            ErrorKind::UnexpectedArgumentBuiltin(arg) => {
+                write!(f, "Unexpected agrument `{arg}` passed to builtin",)
+            }
+
+            ErrorKind::UnexpectedArgumentFormals { arg, .. } => {
                 write!(f, "Unexpected argument `{arg}` supplied to function",)
             }
 
@@ -506,8 +502,6 @@ to a missing value in the attribute set(s) included via `with`."#,
                 write!(f, "Invalid UTF-8 in string")
             }
 
-            ErrorKind::Xml(error) => write!(f, "failed to serialise to XML: {error}"),
-
             ErrorKind::TvixError(inner_error) => {
                 write!(f, "{inner_error}")
             }
@@ -791,7 +785,8 @@ impl Error {
             ErrorKind::DuplicateAttrsKey { .. } => "in this attribute set",
             ErrorKind::InvalidAttributeName(_) => "in this attribute set",
             ErrorKind::RelativePathResolution(_) => "in this path literal",
-            ErrorKind::UnexpectedArgument { .. } => "in this function call",
+            ErrorKind::UnexpectedArgumentBuiltin { .. } => "while calling this builtin",
+            ErrorKind::UnexpectedArgumentFormals { .. } => "in this function call",
             ErrorKind::UnexpectedContext => "in this string",
 
             // The spans for some errors don't have any more descriptive stuff
@@ -823,7 +818,6 @@ impl Error {
             | ErrorKind::JsonError(_)
             | ErrorKind::NotSerialisableToJson(_)
             | ErrorKind::FromTomlError(_)
-            | ErrorKind::Xml(_)
             | ErrorKind::Utf8
             | ErrorKind::TvixError(_)
             | ErrorKind::TvixBug { .. }
@@ -867,15 +861,15 @@ impl Error {
             ErrorKind::ImportCompilerError { .. } => "E028",
             ErrorKind::IO { .. } => "E029",
             ErrorKind::JsonError { .. } => "E030",
-            ErrorKind::UnexpectedArgument { .. } => "E031",
+            ErrorKind::UnexpectedArgumentFormals { .. } => "E031",
             ErrorKind::RelativePathResolution(_) => "E032",
             ErrorKind::DivisionByZero => "E033",
-            ErrorKind::Xml(_) => "E034",
             ErrorKind::FromTomlError(_) => "E035",
             ErrorKind::NotSerialisableToJson(_) => "E036",
             ErrorKind::UnexpectedContext => "E037",
             ErrorKind::Utf8 => "E038",
             ErrorKind::UnknownHashType(_) => "E039",
+            ErrorKind::UnexpectedArgumentBuiltin { .. } => "E040",
 
             // Special error code for errors from other Tvix
             // components. We may want to introduce a code namespacing
@@ -913,7 +907,7 @@ impl Error {
                 spans_for_parse_errors(&file, errors)
             }
 
-            ErrorKind::UnexpectedArgument { formals_span, .. } => {
+            ErrorKind::UnexpectedArgumentFormals { formals_span, .. } => {
                 vec![
                     SpanLabel {
                         label: self.span_label(),
diff --git a/tvix/eval/src/io.rs b/tvix/eval/src/io.rs
index f775077af818..7e8b85c87abb 100644
--- a/tvix/eval/src/io.rs
+++ b/tvix/eval/src/io.rs
@@ -16,15 +16,17 @@
 //! how store paths are opened and so on.
 
 use std::{
-    fs::File,
     io,
     path::{Path, PathBuf},
 };
 
-#[cfg(target_family = "unix")]
+#[cfg(all(target_family = "unix", feature = "impure"))]
 use std::os::unix::ffi::OsStringExt;
 
-/// Types of files as represented by `builtins.readDir` in Nix.
+#[cfg(feature = "impure")]
+use std::fs::File;
+
+/// Types of files as represented by `builtins.readFileType` and `builtins.readDir` in Nix.
 #[derive(Debug)]
 pub enum FileType {
     Directory,
@@ -33,6 +35,19 @@ pub enum FileType {
     Unknown,
 }
 
+impl std::fmt::Display for FileType {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        let type_as_str = match &self {
+            FileType::Directory => "directory",
+            FileType::Regular => "regular",
+            FileType::Symlink => "symlink",
+            FileType::Unknown => "unknown",
+        };
+
+        write!(f, "{}", type_as_str)
+    }
+}
+
 /// Represents all possible filesystem interactions that exist in the Nix
 /// language, and that need to be executed somehow.
 ///
@@ -52,6 +67,10 @@ pub trait EvalIO {
     /// Open the file at the specified path to a `io::Read`.
     fn open(&self, path: &Path) -> io::Result<Box<dyn io::Read>>;
 
+    /// Return the [FileType] of the given path, or an error if it doesn't
+    /// exist.
+    fn file_type(&self, path: &Path) -> io::Result<FileType>;
+
     /// Read the directory at the specified path and return the names
     /// of its entries associated with their [`FileType`].
     ///
@@ -92,13 +111,28 @@ pub struct StdIO;
 #[cfg(feature = "impure")]
 impl EvalIO for StdIO {
     fn path_exists(&self, path: &Path) -> io::Result<bool> {
-        path.try_exists()
+        // In general, an IO error indicates the path doesn't exist
+        Ok(path.try_exists().unwrap_or(false))
     }
 
     fn open(&self, path: &Path) -> io::Result<Box<dyn io::Read>> {
         Ok(Box::new(File::open(path)?))
     }
 
+    fn file_type(&self, path: &Path) -> io::Result<FileType> {
+        let file_type = std::fs::symlink_metadata(path)?;
+
+        Ok(if file_type.is_dir() {
+            FileType::Directory
+        } else if file_type.is_file() {
+            FileType::Regular
+        } else if file_type.is_symlink() {
+            FileType::Symlink
+        } else {
+            FileType::Unknown
+        })
+    }
+
     fn read_dir(&self, path: &Path) -> io::Result<Vec<(bytes::Bytes, FileType)>> {
         let mut result = vec![];
 
@@ -148,6 +182,13 @@ impl EvalIO for DummyIO {
         ))
     }
 
+    fn file_type(&self, _: &Path) -> io::Result<FileType> {
+        Err(io::Error::new(
+            io::ErrorKind::Unsupported,
+            "I/O methods are not implemented in DummyIO",
+        ))
+    }
+
     fn read_dir(&self, _: &Path) -> io::Result<Vec<(bytes::Bytes, FileType)>> {
         Err(io::Error::new(
             io::ErrorKind::Unsupported,
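A small usage sketch of the new EvalIO::file_type method together with the Display impl added above; StdIO requires the impure feature, and the helper name is invented for illustration:

    use std::path::Path;
    use tvix_eval::{EvalIO, StdIO};

    /// Hypothetical helper: report a path's kind via the new method.
    fn describe(io: &dyn EvalIO, path: &Path) -> String {
        match io.file_type(path) {
            // Display yields "directory" | "regular" | "symlink" | "unknown".
            Ok(file_type) => file_type.to_string(),
            Err(e) => format!("inaccessible: {}", e),
        }
    }

    fn main() {
        println!("{}", describe(&StdIO, Path::new("/tmp")));
    }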
diff --git a/tvix/eval/src/lib.rs b/tvix/eval/src/lib.rs
index 845964cb7e00..6897e1dd494f 100644
--- a/tvix/eval/src/lib.rs
+++ b/tvix/eval/src/lib.rs
@@ -29,25 +29,25 @@ mod vm;
 mod warnings;
 
 mod nix_search_path;
-#[cfg(test)]
+#[cfg(all(test, feature = "arbitrary"))]
 mod properties;
 #[cfg(test)]
 mod test_utils;
 #[cfg(test)]
 mod tests;
 
+use rustc_hash::FxHashMap;
 use std::path::PathBuf;
 use std::rc::Rc;
 use std::str::FromStr;
 use std::sync::Arc;
 
-use crate::compiler::GlobalsMap;
 use crate::observer::{CompilerObserver, RuntimeObserver};
 use crate::value::Lambda;
 use crate::vm::run_lambda;
 
 // Re-export the public interface used by other crates.
-pub use crate::compiler::{compile, prepare_globals, CompilationOutput};
+pub use crate::compiler::{compile, prepare_globals, CompilationOutput, GlobalsMap};
 pub use crate::errors::{AddContext, CatchableErrorKind, Error, ErrorKind, EvalResult};
 pub use crate::io::{DummyIO, EvalIO, FileType};
 pub use crate::pretty_ast::pretty_print_expr;
@@ -56,60 +56,326 @@ pub use crate::value::{NixContext, NixContextElement};
 pub use crate::vm::generators;
 pub use crate::warnings::{EvalWarning, WarningKind};
 pub use builtin_macros;
+use smol_str::SmolStr;
 
 pub use crate::value::{Builtin, CoercionKind, NixAttrs, NixList, NixString, Value};
 
 #[cfg(feature = "impure")]
 pub use crate::io::StdIO;
 
+struct BuilderBuiltins {
+    builtins: Vec<(&'static str, Value)>,
+    src_builtins: Vec<(&'static str, &'static str)>,
+}
+
+enum BuilderGlobals {
+    Builtins(BuilderBuiltins),
+    Globals(Rc<GlobalsMap>),
+}
+
+/// Builder for building an [`Evaluation`].
+///
+/// Construct an [`EvaluationBuilder`] by calling one of:
+///
+/// - [`Evaluation::builder`] / [`EvaluationBuilder::new`]
+/// - [`Evaluation::builder_impure`] / [`EvaluationBuilder::new_impure`]
+/// - [`Evaluation::builder_pure`] / [`EvaluationBuilder::new_pure`]
+///
+/// Then configure the fields by calling the various methods on [`EvaluationBuilder`], and finally
+/// call [`build`](Self::build) to construct an [`Evaluation`].
+pub struct EvaluationBuilder<'co, 'ro, 'env, IO> {
+    source_map: Option<SourceCode>,
+    globals: BuilderGlobals,
+    env: Option<&'env FxHashMap<SmolStr, Value>>,
+    io_handle: IO,
+    enable_import: bool,
+    strict: bool,
+    nix_path: Option<String>,
+    compiler_observer: Option<&'co mut dyn CompilerObserver>,
+    runtime_observer: Option<&'ro mut dyn RuntimeObserver>,
+}
+
+impl<'co, 'ro, 'env, IO> EvaluationBuilder<'co, 'ro, 'env, IO>
+where
+    IO: AsRef<dyn EvalIO> + 'static,
+{
+    /// Build an [`Evaluation`] based on the configuration in this builder.
+    ///
+    /// This:
+    ///
+    /// - Adds a `"storeDir"` builtin containing the store directory of the configured IO handle
+    /// - Sets up globals based on the configured builtins
+    /// - Copies all other configured fields to the [`Evaluation`]
+    pub fn build(self) -> Evaluation<'co, 'ro, 'env, IO> {
+        let source_map = self.source_map.unwrap_or_default();
+
+        let globals = match self.globals {
+            BuilderGlobals::Globals(globals) => globals,
+            BuilderGlobals::Builtins(BuilderBuiltins {
+                mut builtins,
+                src_builtins,
+            }) => {
+                // Insert a storeDir builtin *iff* a store directory is present.
+                if let Some(store_dir) = self.io_handle.as_ref().store_dir() {
+                    builtins.push(("storeDir", store_dir.into()));
+                }
+
+                crate::compiler::prepare_globals(
+                    builtins,
+                    src_builtins,
+                    source_map.clone(),
+                    self.enable_import,
+                )
+            }
+        };
+
+        Evaluation {
+            source_map,
+            globals,
+            env: self.env,
+            io_handle: self.io_handle,
+            strict: self.strict,
+            nix_path: self.nix_path,
+            compiler_observer: self.compiler_observer,
+            runtime_observer: self.runtime_observer,
+        }
+    }
+}
+
+// NOTE(aspen): The methods here are intentionally incomplete; feel free to add new ones (ideally
+// with similar naming conventions to the ones already present) but don't expose fields publicly!
+impl<'co, 'ro, 'env, IO> EvaluationBuilder<'co, 'ro, 'env, IO> {
+    pub fn new(io_handle: IO) -> Self {
+        let mut builtins = builtins::pure_builtins();
+        builtins.extend(builtins::placeholders()); // these are temporary
+
+        Self {
+            source_map: None,
+            enable_import: false,
+            io_handle,
+            globals: BuilderGlobals::Builtins(BuilderBuiltins {
+                builtins,
+                src_builtins: vec![],
+            }),
+            env: None,
+            strict: false,
+            nix_path: None,
+            compiler_observer: None,
+            runtime_observer: None,
+        }
+    }
+
+    pub fn io_handle<IO2>(self, io_handle: IO2) -> EvaluationBuilder<'co, 'ro, 'env, IO2> {
+        EvaluationBuilder {
+            io_handle,
+            source_map: self.source_map,
+            globals: self.globals,
+            env: self.env,
+            enable_import: self.enable_import,
+            strict: self.strict,
+            nix_path: self.nix_path,
+            compiler_observer: self.compiler_observer,
+            runtime_observer: self.runtime_observer,
+        }
+    }
+
+    pub fn with_enable_import(self, enable_import: bool) -> Self {
+        Self {
+            enable_import,
+            ..self
+        }
+    }
+
+    pub fn disable_import(self) -> Self {
+        self.with_enable_import(false)
+    }
+
+    pub fn enable_import(self) -> Self {
+        self.with_enable_import(true)
+    }
+
+    fn builtins_mut(&mut self) -> &mut BuilderBuiltins {
+        match &mut self.globals {
+            BuilderGlobals::Builtins(builtins) => builtins,
+            BuilderGlobals::Globals(_) => {
+                panic!("Cannot modify builtins on an EvaluationBuilder with globals configured")
+            }
+        }
+    }
+
+    /// Add additional builtins (represented as tuples of name and [`Value`]) to this evaluation
+    /// builder.
+    ///
+    /// # Panics
+    ///
+    /// Panics if this evaluation builder has had globals set via [`with_globals`]
+    pub fn add_builtins<I>(mut self, builtins: I) -> Self
+    where
+        I: IntoIterator<Item = (&'static str, Value)>,
+    {
+        self.builtins_mut().builtins.extend(builtins);
+        self
+    }
+
+    /// Add additional builtins that are implemented in Nix source code (represented as tuples of
+    /// name and nix source) to this evaluation builder.
+    ///
+    /// # Panics
+    ///
+    /// Panics if this evaluation builder has had globals set via [`with_globals`]
+    pub fn add_src_builtin(mut self, name: &'static str, src: &'static str) -> Self {
+        self.builtins_mut().src_builtins.push((name, src));
+        self
+    }
+
+    /// Set the globals for this evaluation builder to a previously-constructed globals map.
+    /// Intended to allow sharing globals across multiple evaluations (e.g. for the REPL).
+    ///
+    /// Discards any builtins previously configured via [`add_builtins`] and [`add_src_builtin`].
+    /// If either of those methods is called on the evaluation builder after this one, they will
+    /// panic.
+    pub fn with_globals(self, globals: Rc<GlobalsMap>) -> Self {
+        Self {
+            globals: BuilderGlobals::Globals(globals),
+            ..self
+        }
+    }
+
+    pub fn with_source_map(self, source_map: SourceCode) -> Self {
+        debug_assert!(
+            self.source_map.is_none(),
+            "Cannot set the source_map on an EvaluationBuilder twice"
+        );
+        Self {
+            source_map: Some(source_map),
+            ..self
+        }
+    }
+
+    pub fn with_strict(self, strict: bool) -> Self {
+        Self { strict, ..self }
+    }
+
+    pub fn strict(self) -> Self {
+        self.with_strict(true)
+    }
+
+    pub fn nix_path(self, nix_path: Option<String>) -> Self {
+        Self { nix_path, ..self }
+    }
+
+    pub fn env(self, env: Option<&'env FxHashMap<SmolStr, Value>>) -> Self {
+        Self { env, ..self }
+    }
+
+    pub fn compiler_observer(
+        self,
+        compiler_observer: Option<&'co mut dyn CompilerObserver>,
+    ) -> Self {
+        Self {
+            compiler_observer,
+            ..self
+        }
+    }
+
+    pub fn set_compiler_observer(
+        &mut self,
+        compiler_observer: Option<&'co mut dyn CompilerObserver>,
+    ) {
+        self.compiler_observer = compiler_observer;
+    }
+
+    pub fn runtime_observer(self, runtime_observer: Option<&'ro mut dyn RuntimeObserver>) -> Self {
+        Self {
+            runtime_observer,
+            ..self
+        }
+    }
+
+    pub fn set_runtime_observer(&mut self, runtime_observer: Option<&'ro mut dyn RuntimeObserver>) {
+        self.runtime_observer = runtime_observer;
+    }
+}
+
+impl<'co, 'ro, 'env, IO> EvaluationBuilder<'co, 'ro, 'env, IO> {
+    pub fn source_map(&mut self) -> &SourceCode {
+        self.source_map.get_or_insert_with(SourceCode::default)
+    }
+}
+
+impl<'co, 'ro, 'env> EvaluationBuilder<'co, 'ro, 'env, Box<dyn EvalIO>> {
+    /// Initialize an `Evaluation`, without the import statement available, and
+    /// all IO operations stubbed out.
+    pub fn new_pure() -> Self {
+        Self::new(Box::new(DummyIO) as Box<dyn EvalIO>).with_enable_import(false)
+    }
+
+    #[cfg(feature = "impure")]
+    /// Configure an `Evaluation` to have impure features available
+    /// with the given I/O implementation.
+    ///
+    /// If no I/O implementation is supplied, [`StdIO`] is used by
+    /// default.
+    pub fn enable_impure(mut self, io: Option<Box<dyn EvalIO>>) -> Self {
+        self.io_handle = io.unwrap_or_else(|| Box::new(StdIO) as Box<dyn EvalIO>);
+        self.enable_import = true;
+        self.builtins_mut()
+            .builtins
+            .extend(builtins::impure_builtins());
+
+        // Make `NIX_PATH` resolutions work by default, unless the
+        // user already overrode this with something else.
+        if self.nix_path.is_none() {
+            self.nix_path = std::env::var("NIX_PATH").ok();
+        }
+        self
+    }
+
+    #[cfg(feature = "impure")]
+    /// Initialise an `Evaluation`, with all impure features turned on by default.
+    pub fn new_impure() -> Self {
+        Self::new_pure().enable_impure(None)
+    }
+}
+
 /// An `Evaluation` represents how a piece of Nix code is evaluated. It can be
 /// instantiated and configured directly, or it can be accessed through the
 /// various simplified helper methods available below.
 ///
 /// Public fields are intended to be set by the caller. Setting all
 /// fields is optional.
-pub struct Evaluation<'co, 'ro, IO> {
+pub struct Evaluation<'co, 'ro, 'env, IO> {
     /// Source code map used for error reporting.
     source_map: SourceCode,
 
-    /// Set of all builtins that should be available during the
-    /// evaluation.
-    ///
-    /// This defaults to all pure builtins. Users might want to add
-    /// the set of impure builtins, or other custom builtins.
-    pub builtins: Vec<(&'static str, Value)>,
+    /// Set of all global values available at the top-level scope
+    globals: Rc<GlobalsMap>,
 
-    /// Set of builtins that are implemented in Nix itself and should
-    /// be compiled and inserted in the builtins set.
-    pub src_builtins: Vec<(&'static str, &'static str)>,
+    /// Top-level variables to define in the evaluation
+    env: Option<&'env FxHashMap<SmolStr, Value>>,
 
     /// Implementation of file-IO to use during evaluation, e.g. for
     /// impure builtins.
     ///
     /// Defaults to [`DummyIO`] if not set explicitly.
-    pub io_handle: IO,
-
-    /// Determines whether the `import` builtin should be made
-    /// available. Note that this depends on the `io_handle` being
-    /// able to read the files specified as arguments to `import`.
-    pub enable_import: bool,
+    io_handle: IO,
 
     /// Determines whether the returned value should be strictly
     /// evaluated, that is whether its list and attribute set elements
     /// should be forced recursively.
-    pub strict: bool,
+    strict: bool,
 
     /// (optional) Nix search path, e.g. the value of `NIX_PATH` used
     /// for resolving items on the search path (such as `<nixpkgs>`).
-    pub nix_path: Option<String>,
+    nix_path: Option<String>,
 
     /// (optional) compiler observer for reporting on compilation
     /// details, like the emitted bytecode.
-    pub compiler_observer: Option<&'co mut dyn CompilerObserver>,
+    compiler_observer: Option<&'co mut dyn CompilerObserver>,
 
     /// (optional) runtime observer, for reporting on execution steps
     /// of Nix code.
-    pub runtime_observer: Option<&'ro mut dyn RuntimeObserver>,
+    runtime_observer: Option<&'ro mut dyn RuntimeObserver>,
 }
 
 /// Result of evaluating a piece of Nix code. If evaluation succeeded, a value
@@ -131,73 +397,44 @@ pub struct EvaluationResult {
     pub expr: Option<rnix::ast::Expr>,
 }
 
-impl<'co, 'ro, IO> Evaluation<'co, 'ro, IO>
-where
-    IO: AsRef<dyn EvalIO> + 'static,
-{
-    /// Initialize an `Evaluation`.
-    pub fn new(io_handle: IO, enable_import: bool) -> Self {
-        let mut builtins = builtins::pure_builtins();
-        builtins.extend(builtins::placeholders()); // these are temporary
+impl<'co, 'ro, 'env, IO> Evaluation<'co, 'ro, 'env, IO> {
+    /// Make a new [builder][] for configuring an evaluation
+    ///
+    /// [builder]: EvaluationBuilder
+    pub fn builder(io_handle: IO) -> EvaluationBuilder<'co, 'ro, 'env, IO> {
+        EvaluationBuilder::new(io_handle)
+    }
 
-        Self {
-            source_map: SourceCode::default(),
-            enable_import,
-            io_handle,
-            builtins,
-            src_builtins: vec![],
-            strict: false,
-            nix_path: None,
-            compiler_observer: None,
-            runtime_observer: None,
-        }
+    /// Clone the reference to the map of Nix globals for this evaluation. If [`Value`]s are shared
+    /// across subsequent [`Evaluation`]s, it is important that those evaluations all have the same
+    /// underlying globals map.
+    pub fn globals(&self) -> Rc<GlobalsMap> {
+        self.globals.clone()
     }
-}
 
-impl<'co, 'ro> Evaluation<'co, 'ro, Box<dyn EvalIO>> {
-    /// Initialize an `Evaluation`, without the import statement available, and
-    /// all IO operations stubbed out.
-    pub fn new_pure() -> Self {
-        Self::new(Box::new(DummyIO) as Box<dyn EvalIO>, false)
+    /// Clone the reference to the contained source code map. This is used after an evaluation for
+    /// pretty error printing. Also, if [`Value`]s are shared across subsequent [`Evaluation`]s, it
+    /// is important that those evaluations all have the same underlying source code map.
+    pub fn source_map(&self) -> SourceCode {
+        self.source_map.clone()
     }
+}
 
+impl<'co, 'ro, 'env> Evaluation<'co, 'ro, 'env, Box<dyn EvalIO>> {
     #[cfg(feature = "impure")]
-    /// Configure an `Evaluation` to have impure features available
-    /// with the given I/O implementation.
-    ///
-    /// If no I/O implementation is supplied, [`StdIO`] is used by
-    /// default.
-    pub fn enable_impure(&mut self, io: Option<Box<dyn EvalIO>>) {
-        self.io_handle = io.unwrap_or_else(|| Box::new(StdIO) as Box<dyn EvalIO>);
-        self.enable_import = true;
-        self.builtins.extend(builtins::impure_builtins());
-
-        // Make `NIX_PATH` resolutions work by default, unless the
-        // user already overrode this with something else.
-        if self.nix_path.is_none() {
-            self.nix_path = std::env::var("NIX_PATH").ok();
-        }
+    pub fn builder_impure() -> EvaluationBuilder<'co, 'ro, 'env, Box<dyn EvalIO>> {
+        EvaluationBuilder::new_impure()
     }
 
-    #[cfg(feature = "impure")]
-    /// Initialise an `Evaluation`, with all impure features turned on by default.
-    pub fn new_impure() -> Self {
-        let mut eval = Self::new_pure();
-        eval.enable_impure(None);
-        eval
+    pub fn builder_pure() -> EvaluationBuilder<'co, 'ro, 'env, Box<dyn EvalIO>> {
+        EvaluationBuilder::new_pure()
     }
 }
 
-impl<'co, 'ro, IO> Evaluation<'co, 'ro, IO>
+impl<'co, 'ro, 'env, IO> Evaluation<'co, 'ro, 'env, IO>
 where
     IO: AsRef<dyn EvalIO> + 'static,
 {
-    /// Clone the reference to the contained source code map. This is used after
-    /// an evaluation for pretty error printing.
-    pub fn source_map(&self) -> SourceCode {
-        self.source_map.clone()
-    }
-
     /// Only compile the provided source code, at an optional location of the
     /// source code (i.e. path to the file it was read from; used for error
     /// reporting, and for resolving relative paths in impure functions)
@@ -227,9 +464,8 @@ where
             file,
             location,
             source,
-            self.builtins,
-            self.src_builtins,
-            self.enable_import,
+            self.globals,
+            self.env,
             compiler_observer,
         );
 
@@ -257,20 +493,14 @@ where
         let mut noop_observer = observer::NoOpObserver::default();
         let compiler_observer = self.compiler_observer.take().unwrap_or(&mut noop_observer);
 
-        // Insert a storeDir builtin *iff* a store directory is present.
-        if let Some(store_dir) = self.io_handle.as_ref().store_dir() {
-            self.builtins.push(("storeDir", store_dir.into()));
-        }
-
-        let (lambda, globals) = match parse_compile_internal(
+        let lambda = match parse_compile_internal(
             &mut result,
             code.as_ref(),
             file.clone(),
             location,
             source.clone(),
-            self.builtins,
-            self.src_builtins,
-            self.enable_import,
+            self.globals.clone(),
+            self.env,
             compiler_observer,
         ) {
             None => return result,
@@ -302,7 +532,7 @@ where
             self.io_handle,
             runtime_observer,
             source.clone(),
-            globals,
+            self.globals,
             lambda,
             self.strict,
         );
@@ -339,11 +569,10 @@ fn parse_compile_internal(
     file: Arc<codemap::File>,
     location: Option<PathBuf>,
     source: SourceCode,
-    builtins: Vec<(&'static str, Value)>,
-    src_builtins: Vec<(&'static str, &'static str)>,
-    enable_import: bool,
+    globals: Rc<GlobalsMap>,
+    env: Option<&FxHashMap<SmolStr, Value>>,
     compiler_observer: &mut dyn CompilerObserver,
-) -> Option<(Rc<Lambda>, Rc<GlobalsMap>)> {
+) -> Option<Rc<Lambda>> {
     let parsed = rnix::ast::Root::parse(code);
     let parse_errors = parsed.errors();
 
@@ -361,13 +590,11 @@ fn parse_compile_internal(
     // the result, in case the caller needs it for something.
     result.expr = parsed.tree().expr();
 
-    let builtins =
-        crate::compiler::prepare_globals(builtins, src_builtins, source.clone(), enable_import);
-
     let compiler_result = match compiler::compile(
         result.expr.as_ref().unwrap(),
         location,
-        builtins,
+        globals,
+        env,
         &source,
         &file,
         compiler_observer,
@@ -390,5 +617,5 @@ fn parse_compile_internal(
 
-    // Return the lambda (for execution) and the globals map (to
-    // ensure the invariant that the globals outlive the runtime).
-    Some((compiler_result.lambda, compiler_result.globals))
+    // Return the lambda for execution; the globals map is kept alive
+    // by the Evaluation itself now, so it no longer needs returning.
+    Some(compiler_result.lambda)
 }
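Callers now go through the builder instead of setting public fields on Evaluation. A hedged sketch of the new flow; the builder methods appear in the diff above, while `evaluate` and the `value` field on the result are assumed from the surrounding crate:

    use tvix_eval::Evaluation;

    fn run(code: &str) {
        // Pure evaluation: import disabled, IO stubbed out via DummyIO.
        let eval = Evaluation::builder_pure()
            .strict() // force the returned value recursively
            .build();

        // Keep the source map around for error/warning reporting.
        let source_map = eval.source_map();
        let result = eval.evaluate(code, None);

        if let Some(value) = result.value {
            println!("=> {}", value);
        }
        let _ = source_map;
    }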
diff --git a/tvix/eval/src/nix_search_path.rs b/tvix/eval/src/nix_search_path.rs
index 566ca122384b..369c5b6857ba 100644
--- a/tvix/eval/src/nix_search_path.rs
+++ b/tvix/eval/src/nix_search_path.rs
@@ -197,6 +197,8 @@ mod tests {
         }
     }
 
+    // this uses StdIO, which is only available with the impure feature.
+    #[cfg(feature = "impure")]
     mod resolve {
         use crate::StdIO;
         use path_clean::PathClean;
diff --git a/tvix/eval/src/observer.rs b/tvix/eval/src/observer.rs
index f5de399315c7..5e6526418b3b 100644
--- a/tvix/eval/src/observer.rs
+++ b/tvix/eval/src/observer.rs
@@ -13,7 +13,7 @@ use tabwriter::TabWriter;
 
 use crate::chunk::Chunk;
 use crate::generators::VMRequest;
-use crate::opcode::{CodeIdx, OpCode};
+use crate::opcode::{CodeIdx, Op};
 use crate::value::Lambda;
 use crate::SourceCode;
 use crate::Value;
@@ -73,7 +73,7 @@ pub trait RuntimeObserver {
 
     /// Called when the runtime *begins* executing an instruction. The
     /// provided stack is the state at the beginning of the operation.
-    fn observe_execute_op(&mut self, _ip: CodeIdx, _: &OpCode, _: &[Value]) {}
+    fn observe_execute_op(&mut self, _ip: CodeIdx, _: &Op, _: &[Value]) {}
 }
 
 #[derive(Default)]
@@ -112,8 +112,12 @@ impl<W: Write> DisassemblingObserver<W> {
         // calculate width of the widest address in the chunk
         let width = format!("{:#x}", chunk.code.len() - 1).len();
 
-        for (idx, _) in chunk.code.iter().enumerate() {
-            let _ = chunk.disassemble_op(&mut self.writer, &self.source, width, CodeIdx(idx));
+        let mut idx = 0;
+        while idx < chunk.code.len() {
+            let size = chunk
+                .disassemble_op(&mut self.writer, &self.source, width, CodeIdx(idx))
+                .expect("writing debug output should work");
+            idx += size;
         }
     }
 }
@@ -304,7 +308,7 @@ impl<W: Write> RuntimeObserver for TracingObserver<W> {
         );
     }
 
-    fn observe_execute_op(&mut self, ip: CodeIdx, op: &OpCode, stack: &[Value]) {
+    fn observe_execute_op(&mut self, ip: CodeIdx, op: &Op, stack: &[Value]) {
         self.maybe_write_time();
         let _ = write!(&mut self.writer, "{:04} {:?}\t", ip.0, op);
         self.write_stack(stack);
diff --git a/tvix/eval/src/opcode.rs b/tvix/eval/src/opcode.rs
index f89c1c12e7fd..ddf1304b3aea 100644
--- a/tvix/eval/src/opcode.rs
+++ b/tvix/eval/src/opcode.rs
@@ -52,8 +52,7 @@ pub struct JumpOffset(pub usize);
 #[derive(Clone, Copy, Debug, PartialEq, Eq)]
 pub struct Count(pub usize);
 
-/// All variants of this enum carry a bounded amount of data to
-/// ensure that no heap allocations are needed for an Opcode.
+/// Op represents all instructions in the Tvix abstract machine.
 ///
 /// In documentation comments, stack positions are referred to by
 /// indices written in `{}` as such, where required:
@@ -70,187 +69,182 @@ pub struct Count(pub usize);
 ///
 /// Unless otherwise specified, operations leave their result at the
 /// top of the stack.
-#[warn(variant_size_differences)]
-#[derive(Clone, Copy, Debug, PartialEq, Eq)]
-pub enum OpCode {
+#[repr(u8)]
+#[derive(Debug, PartialEq, Eq)]
+pub enum Op {
     /// Push a constant onto the stack.
-    OpConstant(ConstantIdx),
+    Constant,
 
-    // Unary operators
-    /// Discard a value from the stack.
-    OpPop,
+    /// Discard the value on top of the stack.
+    Pop,
 
     /// Invert the boolean at the top of the stack.
-    OpInvert,
+    Invert,
 
-    // Binary operators
     /// Invert the sign of the number at the top of the stack.
-    OpNegate,
+    Negate,
 
     /// Sum up the two numbers at the top of the stack.
-    OpAdd,
+    Add,
 
     /// Subtract the number at {1} from the number at {2}.
-    OpSub,
+    Sub,
 
     /// Multiply the two numbers at the top of the stack.
-    OpMul,
+    Mul,
 
     /// Divide the two numbers at the top of the stack.
-    OpDiv,
+    Div,
 
-    // Comparison operators
     /// Check the two values at the top of the stack for Nix-equality.
-    OpEqual,
+    Equal,
 
     /// Check whether the value at {2} is less than {1}.
-    OpLess,
+    Less,
 
     /// Check whether the value at {2} is less than or equal to {1}.
-    OpLessOrEq,
+    LessOrEq,
 
     /// Check whether the value at {2} is greater than {1}.
-    OpMore,
+    More,
 
     /// Check whether the value at {2} is greater than or equal to {1}.
-    OpMoreOrEq,
+    MoreOrEq,
 
-    // Logical operators & generic jumps
     /// Jump forward in the bytecode specified by the number of
     /// instructions in its usize operand.
-    OpJump(JumpOffset),
+    Jump,
 
     /// Jump forward in the bytecode specified by the number of
     /// instructions in its usize operand, *if* the value at the top
     /// of the stack is `true`.
-    OpJumpIfTrue(JumpOffset),
+    JumpIfTrue,
 
     /// Jump forward in the bytecode specified by the number of
     /// instructions in its usize operand, *if* the value at the top
     /// of the stack is `false`.
-    OpJumpIfFalse(JumpOffset),
+    JumpIfFalse,
 
     /// Pop one stack item and jump forward in the bytecode
     /// specified by the number of instructions in its usize
     /// operand, *if* the value at the top of the stack is a
     /// Value::Catchable.
-    OpJumpIfCatchable(JumpOffset),
+    JumpIfCatchable,
 
     /// Jump forward in the bytecode specified by the number of
     /// instructions in its usize operand, *if* the value at the top
     /// of the stack is the internal value representing a missing
     /// attribute set key.
-    OpJumpIfNotFound(JumpOffset),
+    JumpIfNotFound,
 
     /// Jump forward in the bytecode specified by the number of
     /// instructions in its usize operand, *if* the value at the top
     /// of the stack is *not* the internal value requesting a
     /// stack value finalisation.
-    OpJumpIfNoFinaliseRequest(JumpOffset),
+    JumpIfNoFinaliseRequest,
+
+    /// Construct an attribute set from the given number of key-value pairs on
+    /// the top of the stack. The operand gives the count of *pairs*, not the
+    /// number of *stack values* - the actual number of values popped off the
+    /// stack will be twice the argument to this op.
+    Attrs,
 
-    // Attribute sets
-    /// Construct an attribute set from the given number of key-value pairs on the top of the stack
-    ///
-    /// Note that this takes the count of *pairs*, not the number of *stack values* - the actual
-    /// number of values popped off the stack will be twice the argument to this op
-    OpAttrs(Count),
     /// Merge the attribute set at {2} into the attribute set at {1},
     /// and leave the new set at the top of the stack.
-    OpAttrsUpdate,
+    AttrsUpdate,
 
     /// Select the attribute with the name at {1} from the set at {2}.
-    OpAttrsSelect,
+    AttrsSelect,
 
     /// Select the attribute with the name at {1} from the set at {2}, but leave
     /// a `Value::AttrNotFound` in the stack instead of failing if it is
     /// missing.
-    OpAttrsTrySelect,
+    AttrsTrySelect,
 
     /// Check for the presence of the attribute with the name at {1} in the set
     /// at {2}.
-    OpHasAttr,
+    HasAttr,
 
     /// Throw an error if the attribute set at the top of the stack has any attributes
     /// other than those listed in the formals of the current lambda
     ///
     /// Panics if the current frame is not a lambda with formals
-    OpValidateClosedFormals,
+    ValidateClosedFormals,
 
-    // `with`-handling
     /// Push a value onto the runtime `with`-stack to enable dynamic identifier
     /// resolution. The absolute stack index of the value is supplied as a usize
     /// operand.
-    OpPushWith(StackIdx),
+    PushWith,
 
     /// Pop the last runtime `with`-stack element.
-    OpPopWith,
+    PopWith,
 
     /// Dynamically resolve an identifier with the name at {1} from the runtime
     /// `with`-stack.
-    OpResolveWith,
+    ResolveWith,
 
     // Lists
     /// Construct a list from the given number of values at the top of the
     /// stack.
-    OpList(Count),
+    List,
 
     /// Concatenate the lists at {2} and {1}.
-    OpConcat,
+    Concat,
 
     // Strings
     /// Interpolate the given number of string fragments into a single string.
-    OpInterpolate(Count),
+    Interpolate,
 
     /// Force the Value on the stack and coerce it to a string
-    OpCoerceToString(crate::CoercionKind),
+    CoerceToString,
 
     // Paths
     /// Attempt to resolve the Value on the stack using the configured [`NixSearchPath`][]
     ///
     /// [`NixSearchPath`]: crate::nix_search_path::NixSearchPath
-    OpFindFile,
+    FindFile,
 
     /// Attempt to resolve a path literal relative to the home dir
-    OpResolveHomePath,
+    ResolveHomePath,
 
     // Type assertion operators
     /// Assert that the value at {1} is a boolean, and fail with a runtime error
     /// otherwise.
-    OpAssertBool,
-    OpAssertAttrs,
+    AssertBool,
+    AssertAttrs,
 
     /// Access local identifiers with statically known positions.
-    OpGetLocal(StackIdx),
+    GetLocal,
 
     /// Close scopes while leaving their expression value around.
-    OpCloseScope(Count), // number of locals to pop
+    CloseScope,
 
     /// Return an error indicating that an `assert` failed
-    OpAssertFail,
+    AssertFail,
 
     // Lambdas & closures
     /// Call the value at {1} in a new VM callframe
-    OpCall,
+    Call,
 
     /// Retrieve the upvalue at the given index from the closure or thunk
     /// currently under evaluation.
-    OpGetUpvalue(UpvalueIdx),
+    GetUpvalue,
 
     /// Construct a closure which has upvalues but no self-references
-    OpClosure(ConstantIdx),
+    Closure,
 
     /// Construct a closure which has self-references (direct or via upvalues)
-    OpThunkClosure(ConstantIdx),
+    ThunkClosure,
 
     /// Construct a suspended thunk, used to delay a computation for laziness.
-    OpThunkSuspended(ConstantIdx),
+    ThunkSuspended,
 
     /// Force the value at {1} until it is a `Thunk::Evaluated`.
-    OpForce,
+    Force,
 
     /// Finalise initialisation of the upvalues of the value in the given stack
     /// index (which must be a Value::Thunk) after the scope is fully bound.
-    OpFinalise(StackIdx),
+    Finalise,
 
     /// Final instruction emitted in a chunk. Does not have an
     /// inherent effect, but can simplify VM logic as a marker in some
@@ -258,27 +252,140 @@ pub enum OpCode {
     ///
     /// Can be thought of as "returning" the value to the parent
     /// frame, hence the name.
-    OpReturn,
-
-    // [`OpClosure`], [`OpThunkSuspended`], and [`OpThunkClosure`] have a
-    // variable number of arguments to the instruction, which is
-    // represented here by making their data part of the opcodes.
-    // Each of these two opcodes has a `ConstantIdx`, which must
-    // reference a `Value::Blueprint(Lambda)`.  The `upvalue_count`
-    // field in that `Lambda` indicates the number of arguments it
-    // takes, and the opcode must be followed by exactly this number
-    // of `Data*` opcodes.  The VM skips over these by advancing the
-    // instruction pointer.
-    //
-    // It is illegal for a `Data*` opcode to appear anywhere else.
-    /// Populate a static upvalue by copying from the stack immediately.
-    DataStackIdx(StackIdx),
-    /// Populate a static upvalue of a thunk by copying it from the stack, but
-    /// only when the thunk is finalised (by OpFinalise) rather than immediately.
-    DataDeferredLocal(StackIdx),
-    /// Populate a static upvalue by copying it from the upvalues of an
-    /// enclosing scope.
-    DataUpvalueIdx(UpvalueIdx),
-    /// Populate dynamic upvalues by saving a copy of the with-stack.
-    DataCaptureWith,
+    Return,
+
+    /// Sentinel value to signal invalid bytecode. This MUST always be the last
+    /// value in the enum. Do not move it!
+    Invalid,
+}
+
+const _ASSERT_SMALL_OP: () = assert!(std::mem::size_of::<Op>() == 1);
+
+impl From<u8> for Op {
+    fn from(num: u8) -> Self {
+        if num >= Self::Invalid as u8 {
+            return Self::Invalid;
+        }
+
+        // SAFETY: As long as `Invalid` remains the last variant of the enum,
+        // and as long as variant values are not specified manually, this
+        // conversion is safe.
+        unsafe { std::mem::transmute(num) }
+    }
+}
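
With this conversion, decoding a byte is total: in-range values map back to their variant, and anything else collapses to the sentinel. A small sketch of the expected behaviour, assuming the variant order shown in this diff:

    assert_eq!(Op::from(Op::Constant as u8), Op::Constant);
    assert_eq!(Op::from(Op::Return as u8), Op::Return);
    // Any byte at or past the Invalid sentinel decodes to Invalid rather
    // than transmuting to an undefined variant.
    assert_eq!(Op::from(0xff), Op::Invalid);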
+
+pub enum OpArg {
+    None,
+    Uvarint,
+    Fixed,
+    Custom,
+}
+
+impl Op {
+    pub fn arg_type(&self) -> OpArg {
+        match self {
+            Op::Constant
+            | Op::Attrs
+            | Op::PushWith
+            | Op::List
+            | Op::Interpolate
+            | Op::GetLocal
+            | Op::CloseScope
+            | Op::GetUpvalue
+            | Op::Finalise => OpArg::Uvarint,
+
+            Op::Jump
+            | Op::JumpIfTrue
+            | Op::JumpIfFalse
+            | Op::JumpIfCatchable
+            | Op::JumpIfNotFound
+            | Op::JumpIfNoFinaliseRequest => OpArg::Fixed,
+
+            Op::CoerceToString | Op::Closure | Op::ThunkClosure | Op::ThunkSuspended => {
+                OpArg::Custom
+            }
+            _ => OpArg::None,
+        }
+    }
+}
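
Since operands now live outside the enum, a bytecode scanner needs arg_type to know how many bytes follow each opcode. A hedged sketch of such a walk; the LEB128-style continuation bit and the two-byte fixed operand are assumptions for illustration, not necessarily the crate's actual encoding:

    // Count the bytes of a varint via its continuation bits (assumed encoding).
    fn uvarint_len(bytes: &[u8]) -> usize {
        bytes.iter().take_while(|&&b| b & 0x80 != 0).count() + 1
    }

    // Width of one full instruction: opcode byte plus operand bytes.
    fn instruction_width(code: &[u8], idx: usize) -> usize {
        match Op::from(code[idx]).arg_type() {
            OpArg::None => 1,
            OpArg::Fixed => 1 + 2, // illustrative fixed operand width
            OpArg::Uvarint => 1 + uvarint_len(&code[idx + 1..]),
            OpArg::Custom => unimplemented!("requires op-specific decoding"),
        }
    }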
+
+/// Position encodes where to capture an upvalue from: the low two bits tag the capture kind, the remaining bits hold the index.
+#[derive(Clone, Copy)]
+pub struct Position(pub u64);
+
+impl Position {
+    pub fn stack_index(idx: StackIdx) -> Self {
+        Position((idx.0 as u64) << 2)
+    }
+
+    pub fn deferred_local(idx: StackIdx) -> Self {
+        Position(((idx.0 as u64) << 2) | 1)
+    }
+
+    pub fn upvalue_index(idx: UpvalueIdx) -> Self {
+        Position(((idx.0 as u64) << 2) | 2)
+    }
+
+    pub fn runtime_stack_index(&self) -> Option<StackIdx> {
+        if (self.0 & 0b11) == 0 {
+            return Some(StackIdx((self.0 >> 2) as usize));
+        }
+
+        None
+    }
+
+    pub fn runtime_deferred_local(&self) -> Option<StackIdx> {
+        if (self.0 & 0b11) == 1 {
+            return Some(StackIdx((self.0 >> 2) as usize));
+        }
+
+        None
+    }
+
+    pub fn runtime_upvalue_index(&self) -> Option<UpvalueIdx> {
+        if (self.0 & 0b11) == 2 {
+            return Some(UpvalueIdx((self.0 >> 2) as usize));
+        }
+
+        None
+    }
+}
+
+#[cfg(test)]
+mod position_tests {
+    use super::Position; // he-he
+    use super::{StackIdx, UpvalueIdx};
+
+    #[test]
+    fn test_stack_index_position() {
+        let idx = StackIdx(42);
+        let pos = Position::stack_index(idx);
+        let result = pos.runtime_stack_index();
+
+        assert_eq!(result, Some(idx));
+        assert_eq!(pos.runtime_deferred_local(), None);
+        assert_eq!(pos.runtime_upvalue_index(), None);
+    }
+
+    #[test]
+    fn test_deferred_local_position() {
+        let idx = StackIdx(42);
+        let pos = Position::deferred_local(idx);
+        let result = pos.runtime_deferred_local();
+
+        assert_eq!(result, Some(idx));
+        assert_eq!(pos.runtime_stack_index(), None);
+        assert_eq!(pos.runtime_upvalue_index(), None);
+    }
+
+    #[test]
+    fn test_upvalue_index_position() {
+        let idx = UpvalueIdx(42);
+        let pos = Position::upvalue_index(idx);
+        let result = pos.runtime_upvalue_index();
+
+        assert_eq!(result, Some(idx));
+        assert_eq!(pos.runtime_stack_index(), None);
+        assert_eq!(pos.runtime_deferred_local(), None);
+    }
 }
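
The tag arithmetic these accessors rely on can also be checked in isolation; a standalone sketch:

    let idx: u64 = 42;
    let packed = (idx << 2) | 1; // tag 1 marks a deferred local
    assert_eq!(packed & 0b11, 1); // the low two bits recover the kind
    assert_eq!(packed >> 2, 42); // the high bits recover the index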
diff --git a/tvix/eval/src/spans.rs b/tvix/eval/src/spans.rs
index f422093b0d12..df2b9a725562 100644
--- a/tvix/eval/src/spans.rs
+++ b/tvix/eval/src/spans.rs
@@ -5,37 +5,6 @@ use codemap::{File, Span};
 use rnix::ast;
 use rowan::ast::AstNode;
 
-/// Helper struct to carry information required for making a span, but
-/// without actually performing the (expensive) span lookup.
-///
-/// This is used for tracking spans across thunk boundaries, as they
-/// are frequently instantiated but spans are only used in error or
-/// warning cases.
-#[derive(Clone, Debug)]
-pub enum LightSpan {
-    /// The span has already been computed and can just be used right
-    /// away.
-    Actual { span: Span },
-}
-
-impl LightSpan {
-    pub fn new_actual(span: Span) -> Self {
-        Self::Actual { span }
-    }
-
-    pub fn span(&self) -> Span {
-        match self {
-            LightSpan::Actual { span } => *span,
-        }
-    }
-}
-
-impl From<Span> for LightSpan {
-    fn from(span: Span) -> Self {
-        LightSpan::Actual { span }
-    }
-}
-
 /// Trait implemented by all types from which we can retrieve a span.
 pub trait ToSpan {
     fn span_for(&self, file: &File) -> Span;
@@ -66,6 +35,33 @@ impl ToSpan for rnix::SyntaxNode {
     }
 }
 
+/// A placeholder [`ToSpan`] implementation covering the entire source file.
+#[derive(Debug, Clone, Copy)]
+pub struct EntireFile;
+
+impl ToSpan for EntireFile {
+    fn span_for(&self, file: &File) -> Span {
+        file.span
+    }
+}
+
+/// A placeholder [`ToSpan`] implementation which falls back to the entire
+/// file if its wrapped value is [`None`].
+#[derive(Debug, Clone, Copy)]
+pub struct OrEntireFile<T>(pub Option<T>);
+
+impl<T> ToSpan for OrEntireFile<T>
+where
+    T: ToSpan,
+{
+    fn span_for(&self, file: &File) -> Span {
+        match &self.0 {
+            Some(t) => t.span_for(file),
+            None => EntireFile.span_for(file),
+        }
+    }
+}
+
 /// Generates a `ToSpan` implementation for a type implementing
 /// `rowan::AstNode`. This is impossible to do as a blanket
 /// implementation because `rustc` forbids these implementations for
diff --git a/tvix/eval/src/tests/mod.rs b/tvix/eval/src/tests/mod.rs
index 5a7708e298e8..21b5d35e6af0 100644
--- a/tvix/eval/src/tests/mod.rs
+++ b/tvix/eval/src/tests/mod.rs
@@ -1,203 +1,6 @@
-use crate::{value::Value, EvalIO};
-use builtin_macros::builtins;
-use pretty_assertions::assert_eq;
-use rstest::rstest;
-use std::path::PathBuf;
-
 /// Module for one-off tests which do not follow the rest of the
 /// test layout.
 mod one_offs;
 
-#[builtins]
-mod mock_builtins {
-    //! Builtins which are required by language tests, but should not
-    //! actually exist in //tvix/eval.
-    use crate as tvix_eval;
-    use crate::generators::GenCo;
-    use crate::*;
-    use genawaiter::rc::Gen;
-
-    #[builtin("derivation")]
-    async fn builtin_derivation(co: GenCo, input: Value) -> Result<Value, ErrorKind> {
-        let input = input.to_attrs()?;
-        let attrs = input.update(NixAttrs::from_iter(
-            [
-                (
-                    "outPath",
-                    "/nix/store/00000000000000000000000000000000-mock",
-                ),
-                (
-                    "drvPath",
-                    "/nix/store/00000000000000000000000000000000-mock.drv",
-                ),
-                ("type", "derivation"),
-            ]
-            .into_iter(),
-        ));
-
-        Ok(Value::Attrs(Box::new(attrs)))
-    }
-}
-
-fn eval_test(code_path: PathBuf, expect_success: bool) {
-    std::env::set_var("TEST_VAR", "foo"); // for eval-okay-getenv.nix
-
-    eprintln!("path: {}", code_path.display());
-    assert_eq!(
-        code_path.extension().unwrap(),
-        "nix",
-        "test files always end in .nix"
-    );
-
-    let code = std::fs::read_to_string(&code_path).expect("should be able to read test code");
-
-    let mut eval = crate::Evaluation::new_impure();
-    eval.strict = true;
-    eval.builtins.extend(mock_builtins::builtins());
-
-    let result = eval.evaluate(code, Some(code_path.clone()));
-    let failed = match result.value {
-        Some(Value::Catchable(_)) => true,
-        _ => !result.errors.is_empty(),
-    };
-    if expect_success && failed {
-        panic!(
-            "{}: evaluation of eval-okay test should succeed, but failed with {:?}",
-            code_path.display(),
-            result.errors,
-        );
-    }
-
-    if !expect_success && failed {
-        return;
-    }
-    // !expect_success can also mean the output differs, so don't panic if the
-    // evaluation didn't fail.
-
-    let value = result.value.unwrap();
-    let result_str = value.to_string();
-
-    let exp_path = code_path.with_extension("exp");
-    if exp_path.exists() {
-        // If there's an .exp file provided alongside, compare it with the
-        // output of the NixValue .to_string() method.
-        let exp_str = std::fs::read_to_string(&exp_path).expect("unable to read .exp file");
-
-        if expect_success {
-            assert_eq!(
-                result_str,
-                exp_str.trim(),
-                "{}: result value representation (left) must match expectation (right)",
-                code_path.display()
-            );
-        } else {
-            assert_ne!(
-                result_str,
-                exp_str.trim(),
-                "{}: test passed unexpectedly!  consider moving it out of notyetpassing",
-                code_path.display()
-            );
-
-            // Early return here, we don't compare .xml outputs if this is a !
-            // expect_success test.
-            return;
-        }
-    }
-
-    let exp_xml_path = code_path.with_extension("exp.xml");
-    if exp_xml_path.exists() {
-        // If there's an XML file provided alongside, compare it with the
-        // output produced when serializing the Value as XML.
-        let exp_xml_str = std::fs::read_to_string(exp_xml_path).expect("unable to read .xml file");
-
-        let mut xml_actual_buf = Vec::new();
-        crate::builtins::value_to_xml(&mut xml_actual_buf, &value).expect("value_to_xml failed");
-
-        assert_eq!(
-            String::from_utf8(xml_actual_buf).expect("to_xml produced invalid utf-8"),
-            exp_xml_str,
-            "{}: result value representation (left) must match expectation (right)",
-            code_path.display()
-        );
-    }
-}
-
-// identity-* tests contain Nix code snippets which should evaluate to
-// themselves exactly (i.e. literals).
-#[rstest]
-fn identity(#[files("src/tests/tvix_tests/identity-*.nix")] code_path: PathBuf) {
-    let code = std::fs::read_to_string(code_path).expect("should be able to read test code");
-
-    let mut eval = crate::Evaluation::new(Box::new(crate::StdIO) as Box<dyn EvalIO>, false);
-    eval.strict = true;
-
-    let result = eval.evaluate(&code, None);
-    assert!(
-        result.errors.is_empty(),
-        "evaluation of identity test failed: {:?}",
-        result.errors
-    );
-
-    let result_str = result.value.unwrap().to_string();
-
-    assert_eq!(
-        result_str,
-        code.trim(),
-        "result value representation (left) must match expectation (right)"
-    )
-}
-
-// eval-okay-* tests contain a snippet of Nix code, and an expectation
-// of the produced string output of the evaluator.
-//
-// These evaluations are always supposed to succeed, i.e. all snippets
-// are guaranteed to be valid Nix code.
-#[rstest]
-fn eval_okay(#[files("src/tests/tvix_tests/eval-okay-*.nix")] code_path: PathBuf) {
-    eval_test(code_path, true)
-}
-
-// eval-okay-* tests from the original Nix test suite.
-#[cfg(feature = "nix_tests")]
-#[rstest]
-fn nix_eval_okay(#[files("src/tests/nix_tests/eval-okay-*.nix")] code_path: PathBuf) {
-    eval_test(code_path, true)
-}
-
-// eval-okay-* tests from the original Nix test suite which do not yet pass for tvix
-//
-// Eventually there will be none of these left, and this function
-// will disappear :)
-//
-// Please don't submit failing tests unless they're in
-// notyetpassing; this makes the test suite much more useful for
-// regression testing, since there should always be zero non-ignored
-// failing tests.
-#[rstest]
-fn nix_eval_okay_currently_failing(
-    #[files("src/tests/nix_tests/notyetpassing/eval-okay-*.nix")] code_path: PathBuf,
-) {
-    eval_test(code_path, false)
-}
-
-#[rstest]
-fn eval_okay_currently_failing(
-    #[files("src/tests/tvix_tests/notyetpassing/eval-okay-*.nix")] code_path: PathBuf,
-) {
-    eval_test(code_path, false)
-}
-
-// eval-fail-* tests contain a snippet of Nix code, which is
-// expected to fail evaluation.  The exact type of failure
-// (assertion, parse error, etc) is not currently checked.
-#[rstest]
-fn eval_fail(#[files("src/tests/tvix_tests/eval-fail-*.nix")] code_path: PathBuf) {
-    eval_test(code_path, false)
-}
-
-// eval-fail-* tests from the original Nix test suite.
 #[cfg(feature = "nix_tests")]
-#[rstest]
-fn nix_eval_fail(#[files("src/tests/nix_tests/eval-fail-*.nix")] code_path: PathBuf) {
-    eval_test(code_path, false)
-}
+mod nix_tests;
diff --git a/tvix/eval/src/tests/nix_tests.rs b/tvix/eval/src/tests/nix_tests.rs
new file mode 100644
index 000000000000..cdaed193f206
--- /dev/null
+++ b/tvix/eval/src/tests/nix_tests.rs
@@ -0,0 +1,210 @@
+use crate::value::Value;
+use builtin_macros::builtins;
+use pretty_assertions::assert_eq;
+use rstest::rstest;
+use std::path::PathBuf;
+
+#[builtins]
+mod mock_builtins {
+    //! Builtins which are required by language tests, but should not
+    //! actually exist in //tvix/eval.
+    use crate as tvix_eval;
+    use crate::generators::GenCo;
+    use crate::*;
+    use genawaiter::rc::Gen;
+
+    #[builtin("derivation")]
+    async fn builtin_derivation(co: GenCo, input: Value) -> Result<Value, ErrorKind> {
+        let input = input.to_attrs()?;
+        let attrs = input.update(NixAttrs::from_iter(
+            [
+                (
+                    "outPath",
+                    "/nix/store/00000000000000000000000000000000-mock",
+                ),
+                (
+                    "drvPath",
+                    "/nix/store/00000000000000000000000000000000-mock.drv",
+                ),
+                ("type", "derivation"),
+            ]
+            .into_iter(),
+        ));
+
+        Ok(Value::Attrs(Box::new(attrs)))
+    }
+}
+
+#[cfg(feature = "impure")]
+fn eval_test(code_path: PathBuf, expect_success: bool) {
+    std::env::set_var("TEST_VAR", "foo"); // for eval-okay-getenv.nix
+
+    eprintln!("path: {}", code_path.display());
+    assert_eq!(
+        code_path.extension().unwrap(),
+        "nix",
+        "test files always end in .nix"
+    );
+
+    let code = std::fs::read_to_string(&code_path).expect("should be able to read test code");
+
+    let eval = crate::Evaluation::builder_impure()
+        .strict()
+        .add_builtins(mock_builtins::builtins())
+        .build();
+
+    let result = eval.evaluate(code, Some(code_path.clone()));
+    let failed = match result.value {
+        Some(Value::Catchable(_)) => true,
+        _ => !result.errors.is_empty(),
+    };
+    if expect_success && failed {
+        panic!(
+            "{}: evaluation of eval-okay test should succeed, but failed with {:?}",
+            code_path.display(),
+            result.errors,
+        );
+    }
+
+    if !expect_success && failed {
+        return;
+    }
+    // !expect_success can also mean the output differs, so don't panic if the
+    // evaluation didn't fail.
+
+    let value = result.value.unwrap();
+    let result_str = value.to_string();
+
+    let exp_path = code_path.with_extension("exp");
+    if exp_path.exists() {
+        // If there's an .exp file provided alongside, compare it with the
+        // output of the NixValue .to_string() method.
+        let exp_str = std::fs::read_to_string(&exp_path).expect("unable to read .exp file");
+
+        if expect_success {
+            assert_eq!(
+                result_str,
+                exp_str.trim(),
+                "{}: result value representation (left) must match expectation (right)",
+                code_path.display()
+            );
+        } else {
+            assert_ne!(
+                result_str,
+                exp_str.trim(),
+                "{}: test passed unexpectedly!  consider moving it out of notyetpassing",
+                code_path.display()
+            );
+
+            // Early return here; we don't compare .xml outputs if this is a
+            // !expect_success test.
+            return;
+        }
+    }
+
+    let exp_xml_path = code_path.with_extension("exp.xml");
+    if exp_xml_path.exists() {
+        // If there's an XML file provided alongside, compare it with the
+        // output produced when serializing the Value as XML.
+        let exp_xml_str = std::fs::read_to_string(exp_xml_path).expect("unable to read .xml file");
+
+        let mut xml_actual_buf = Vec::new();
+        crate::builtins::value_to_xml(&mut xml_actual_buf, &value).expect("value_to_xml failed");
+
+        assert_eq!(
+            String::from_utf8(xml_actual_buf).expect("to_xml produced invalid utf-8"),
+            exp_xml_str,
+            "{}: result value representation (left) must match expectation (right)",
+            code_path.display()
+        );
+    }
+}
+
+// identity-* tests contain Nix code snippets which should evaluate to
+// themselves exactly (i.e. literals).
+#[cfg(feature = "impure")]
+#[rstest]
+fn identity(#[files("src/tests/tvix_tests/identity-*.nix")] code_path: PathBuf) {
+    use crate::EvalIO;
+
+    let code = std::fs::read_to_string(code_path).expect("should be able to read test code");
+
+    let eval = crate::Evaluation::builder(Box::new(crate::StdIO) as Box<dyn EvalIO>)
+        .disable_import()
+        .strict()
+        .build();
+
+    let result = eval.evaluate(&code, None);
+    assert!(
+        result.errors.is_empty(),
+        "evaluation of identity test failed: {:?}",
+        result.errors
+    );
+
+    let result_str = result.value.unwrap().to_string();
+
+    assert_eq!(
+        result_str,
+        code.trim(),
+        "result value representation (left) must match expectation (right)"
+    )
+}
+
+// eval-okay-* tests contain a snippet of Nix code, and an expectation
+// of the produced string output of the evaluator.
+//
+// These evaluations are always supposed to succeed, i.e. all snippets
+// are guaranteed to be valid Nix code.
+#[cfg(feature = "impure")]
+#[rstest]
+fn eval_okay(#[files("src/tests/tvix_tests/eval-okay-*.nix")] code_path: PathBuf) {
+    eval_test(code_path, true)
+}
+
+// eval-okay-* tests from the original Nix test suite.
+#[cfg(feature = "impure")]
+#[rstest]
+fn nix_eval_okay(#[files("src/tests/nix_tests/eval-okay-*.nix")] code_path: PathBuf) {
+    eval_test(code_path, true)
+}
+
+// eval-okay-* tests from the original Nix test suite which do not yet pass for tvix
+//
+// Eventually there will be none of these left, and this function
+// will disappear :)
+//
+// Please don't submit failing tests unless they're in
+// notyetpassing; this makes the test suite much more useful for
+// regression testing, since there should always be zero non-ignored
+// failing tests.
+#[cfg(feature = "impure")]
+#[rstest]
+fn nix_eval_okay_currently_failing(
+    #[files("src/tests/nix_tests/notyetpassing/eval-okay-*.nix")] code_path: PathBuf,
+) {
+    eval_test(code_path, false)
+}
+
+#[cfg(feature = "impure")]
+#[rstest]
+fn eval_okay_currently_failing(
+    #[files("src/tests/tvix_tests/notyetpassing/eval-okay-*.nix")] code_path: PathBuf,
+) {
+    eval_test(code_path, false)
+}
+
+// eval-fail-* tests contain a snippet of Nix code, which is
+// expected to fail evaluation.  The exact type of failure
+// (assertion, parse error, etc) is not currently checked.
+#[cfg(feature = "impure")]
+#[rstest]
+fn eval_fail(#[files("src/tests/tvix_tests/eval-fail-*.nix")] code_path: PathBuf) {
+    eval_test(code_path, false)
+}
+
+// eval-fail-* tests from the original Nix test suite.
+#[cfg(feature = "impure")]
+#[rstest]
+fn nix_eval_fail(#[files("src/tests/nix_tests/eval-fail-*.nix")] code_path: PathBuf) {
+    eval_test(code_path, false)
+}
diff --git a/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-readDir.exp b/tvix/eval/src/tests/nix_tests/eval-okay-readDir.exp
index 6413f6d4f9ec..6413f6d4f9ec 100644
--- a/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-readDir.exp
+++ b/tvix/eval/src/tests/nix_tests/eval-okay-readDir.exp
diff --git a/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-readDir.nix.disabled b/tvix/eval/src/tests/nix_tests/eval-okay-readDir.nix
index a7ec9292aae2..a7ec9292aae2 100644
--- a/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-readDir.nix.disabled
+++ b/tvix/eval/src/tests/nix_tests/eval-okay-readDir.nix
diff --git a/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-readFileType.exp b/tvix/eval/src/tests/nix_tests/eval-okay-readFileType.exp
index 6413f6d4f9ec..6413f6d4f9ec 100644
--- a/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-readFileType.exp
+++ b/tvix/eval/src/tests/nix_tests/eval-okay-readFileType.exp
diff --git a/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-readFileType.nix b/tvix/eval/src/tests/nix_tests/eval-okay-readFileType.nix
index 174fb6c3a028..174fb6c3a028 100644
--- a/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-readFileType.nix
+++ b/tvix/eval/src/tests/nix_tests/eval-okay-readFileType.nix
diff --git a/tvix/eval/src/tests/tvix_tests/readDir/bar b/tvix/eval/src/tests/nix_tests/readDir/bar
index e69de29bb2d1..e69de29bb2d1 100644
--- a/tvix/eval/src/tests/tvix_tests/readDir/bar
+++ b/tvix/eval/src/tests/nix_tests/readDir/bar
diff --git a/tvix/eval/src/tests/nix_tests/notyetpassing/readDir/foo/git-hates-directories b/tvix/eval/src/tests/nix_tests/readDir/foo/git-hates-directories
index e69de29bb2d1..e69de29bb2d1 100644
--- a/tvix/eval/src/tests/nix_tests/notyetpassing/readDir/foo/git-hates-directories
+++ b/tvix/eval/src/tests/nix_tests/readDir/foo/git-hates-directories
diff --git a/tvix/eval/src/tests/nix_tests/notyetpassing/readDir/ldir b/tvix/eval/src/tests/nix_tests/readDir/ldir
index 19102815663d..19102815663d 120000
--- a/tvix/eval/src/tests/nix_tests/notyetpassing/readDir/ldir
+++ b/tvix/eval/src/tests/nix_tests/readDir/ldir
diff --git a/tvix/eval/src/tests/nix_tests/notyetpassing/readDir/linked b/tvix/eval/src/tests/nix_tests/readDir/linked
index c503f86a0cf7..c503f86a0cf7 120000
--- a/tvix/eval/src/tests/nix_tests/notyetpassing/readDir/linked
+++ b/tvix/eval/src/tests/nix_tests/readDir/linked
diff --git a/tvix/eval/src/tests/one_offs.rs b/tvix/eval/src/tests/one_offs.rs
index 565d1dd48f77..49ed713fc96c 100644
--- a/tvix/eval/src/tests/one_offs.rs
+++ b/tvix/eval/src/tests/one_offs.rs
@@ -5,8 +5,9 @@ fn test_source_builtin() {
     // Test an evaluation with a source-only builtin. The test ensures
     // that the artificially constructed thunking is correct.
 
-    let mut eval = Evaluation::new_impure();
-    eval.src_builtins.push(("testSourceBuiltin", "42"));
+    let eval = Evaluation::builder_pure()
+        .add_src_builtin("testSourceBuiltin", "42")
+        .build();
 
     let result = eval.evaluate("builtins.testSourceBuiltin", None);
     assert!(
@@ -25,7 +26,9 @@ fn test_source_builtin() {
 
 #[test]
 fn skip_broken_bytecode() {
-    let result = Evaluation::new_pure().evaluate(/* code = */ "x", None);
+    let result = Evaluation::builder_pure()
+        .build()
+        .evaluate(/* code = */ "x", None);
 
     assert_eq!(result.errors.len(), 1);
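
Taken together, these hunks show the shape of the new construction API: configure a builder, then seal it with build(). A usage sketch assembled from calls that appear in this diff (the method set shown is not exhaustive):

    let eval = Evaluation::builder_pure()
        .strict()
        .add_src_builtin("testSourceBuiltin", "42")
        .build();

    let result = eval.evaluate("builtins.testSourceBuiltin", None);
    assert!(result.errors.is_empty());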
 
diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-split.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-split.exp
new file mode 100644
index 000000000000..eb2117a0ce56
--- /dev/null
+++ b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-split.exp
@@ -0,0 +1 @@
+[ [ "" [ "a" ] "c" ] [ "" [ "a" ] "b" [ "c" ] "" ] [ "" [ "a" null ] "b" [ null "c" ] "" ] [ " " [ "FOO" ] " " ] [ "" [ "abc" ] "" [ "" ] "" ] [ "" [ "abc" ] "" [ "" ] "" ] [ "" [ ] "" ] ]
diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-split.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-split.nix
new file mode 100644
index 000000000000..95305040dce2
--- /dev/null
+++ b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-split.nix
@@ -0,0 +1,10 @@
+[
+  (builtins.split "(a)b" "abc")
+  (builtins.split "([ac])" "abc")
+  (builtins.split "(a)|(c)" "abc")
+  (builtins.split "([[:upper:]]+)" " FOO ")
+
+  (builtins.split "(.*)" "abc")
+  (builtins.split "([abc]*)" "abc")
+  (builtins.split ".*" "")
+]
diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-path-exists-child-of-file.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-path-exists-child-of-file.exp
new file mode 100644
index 000000000000..c508d5366f70
--- /dev/null
+++ b/tvix/eval/src/tests/tvix_tests/eval-okay-path-exists-child-of-file.exp
@@ -0,0 +1 @@
+false
diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-path-exists-child-of-file.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-path-exists-child-of-file.nix
new file mode 100644
index 000000000000..c756bff755ba
--- /dev/null
+++ b/tvix/eval/src/tests/tvix_tests/eval-okay-path-exists-child-of-file.nix
@@ -0,0 +1 @@
+builtins.pathExists ("/dev/null/.")
diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-readDir.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-readDir.exp
deleted file mode 100644
index bf8d2c14ea4f..000000000000
--- a/tvix/eval/src/tests/tvix_tests/eval-okay-readDir.exp
+++ /dev/null
@@ -1 +0,0 @@
-{ bar = "regular"; foo = "directory"; }
diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-readDir.nix.disabled b/tvix/eval/src/tests/tvix_tests/eval-okay-readDir.nix.disabled
deleted file mode 100644
index a7ec9292aae2..000000000000
--- a/tvix/eval/src/tests/tvix_tests/eval-okay-readDir.nix.disabled
+++ /dev/null
@@ -1 +0,0 @@
-builtins.readDir ./readDir
diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-toxml-empty.exp.xml b/tvix/eval/src/tests/tvix_tests/eval-okay-toxml-empty.exp.xml
new file mode 100644
index 000000000000..468972b2f819
--- /dev/null
+++ b/tvix/eval/src/tests/tvix_tests/eval-okay-toxml-empty.exp.xml
@@ -0,0 +1,5 @@
+<?xml version='1.0' encoding='utf-8'?>
+<expr>
+  <attrs>
+  </attrs>
+</expr>
diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-toxml-empty.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-toxml-empty.nix
new file mode 100644
index 000000000000..ffcd4415b08f
--- /dev/null
+++ b/tvix/eval/src/tests/tvix_tests/eval-okay-toxml-empty.nix
@@ -0,0 +1 @@
+{ }
diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-toxml.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-toxml.exp
new file mode 100644
index 000000000000..9ae16de526f1
--- /dev/null
+++ b/tvix/eval/src/tests/tvix_tests/eval-okay-toxml.exp
@@ -0,0 +1 @@
+"<?xml version='1.0' encoding='utf-8'?>\n<expr>\n  <attrs>\n    <attr name=\"&amp;-{\">\n      <string value=\";&amp;&quot;\" />\n    </attr>\n    <attr name=\"a\">\n      <string value=\"s\" />\n    </attr>\n  </attrs>\n</expr>\n"
diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-toxml.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-toxml.nix
new file mode 100644
index 000000000000..7d074048ddec
--- /dev/null
+++ b/tvix/eval/src/tests/tvix_tests/eval-okay-toxml.nix
@@ -0,0 +1,2 @@
+# Check some corner cases regarding escaping.
+builtins.toXML { a = "s"; "&-{" = ";&\""; }
diff --git a/tvix/eval/src/value/arbitrary.rs b/tvix/eval/src/value/arbitrary.rs
index bf53f4fcb28a..49b9f2eea3fb 100644
--- a/tvix/eval/src/value/arbitrary.rs
+++ b/tvix/eval/src/value/arbitrary.rs
@@ -1,6 +1,6 @@
 //! Support for configurable generation of arbitrary nix values
 
-use imbl::proptest::{ord_map, vector};
+use proptest::collection::{btree_map, vec};
 use proptest::{prelude::*, strategy::BoxedStrategy};
 use std::ffi::OsString;
 
@@ -33,16 +33,16 @@ impl Arbitrary for NixAttrs {
     fn arbitrary_with(args: Self::Parameters) -> Self::Strategy {
         prop_oneof![
             // Empty attrs representation
-            Just(Self(AttrsRep::Empty)),
+            Just(AttrsRep::Empty.into()),
             // KV representation (name/value pairs)
             (
                 any_with::<Value>(args.clone()),
                 any_with::<Value>(args.clone())
             )
-                .prop_map(|(name, value)| Self(AttrsRep::KV { name, value })),
+                .prop_map(|(name, value)| AttrsRep::KV { name, value }.into()),
             // Map representation
-            ord_map(NixString::arbitrary(), Value::arbitrary_with(args), 0..100)
-                .prop_map(|map| Self(AttrsRep::Im(map)))
+            btree_map(NixString::arbitrary(), Value::arbitrary_with(args), 0..100)
+                .prop_map(|map| AttrsRep::Map(map).into())
         ]
         .boxed()
     }
@@ -53,7 +53,7 @@ impl Arbitrary for NixList {
     type Strategy = BoxedStrategy<Self>;
 
     fn arbitrary_with(args: Self::Parameters) -> Self::Strategy {
-        vector(<Value as Arbitrary>::arbitrary_with(args), 0..100)
+        vec(<Value as Arbitrary>::arbitrary_with(args), 0..100)
             .prop_map(NixList::from)
             .boxed()
     }
diff --git a/tvix/eval/src/value/attrs.rs b/tvix/eval/src/value/attrs.rs
index 33259c8058eb..00b913347418 100644
--- a/tvix/eval/src/value/attrs.rs
+++ b/tvix/eval/src/value/attrs.rs
@@ -6,10 +6,11 @@
 //! Due to this, construction and management of attribute sets has
 //! some peculiarities that are encapsulated within this module.
 use std::borrow::Borrow;
+use std::collections::{btree_map, BTreeMap};
 use std::iter::FromIterator;
+use std::rc::Rc;
 
 use bstr::BStr;
-use imbl::{ordmap, OrdMap};
 use lazy_static::lazy_static;
 use serde::de::{Deserializer, Error, Visitor};
 use serde::Deserialize;
@@ -36,7 +37,7 @@ pub(super) enum AttrsRep {
     #[default]
     Empty,
 
-    Im(OrdMap<NixString, Value>),
+    Map(BTreeMap<NixString, Value>),
 
     /// Warning: this represents a **two**-attribute attrset, with
     /// attribute names "name" and "value", like `{name="foo";
@@ -48,28 +49,6 @@ pub(super) enum AttrsRep {
 }
 
 impl AttrsRep {
-    /// Retrieve reference to a mutable map inside of an attrs,
-    /// optionally changing the representation if required.
-    fn map_mut(&mut self) -> &mut OrdMap<NixString, Value> {
-        match self {
-            AttrsRep::Im(m) => m,
-
-            AttrsRep::Empty => {
-                *self = AttrsRep::Im(OrdMap::new());
-                self.map_mut()
-            }
-
-            AttrsRep::KV { name, value } => {
-                *self = AttrsRep::Im(ordmap! {
-                    NAME_S.clone() => name.clone(),
-                    VALUE_S.clone() => value.clone()
-                });
-
-                self.map_mut()
-            }
-        }
-    }
-
     fn select(&self, key: &BStr) -> Option<&Value> {
         match self {
             AttrsRep::Empty => None,
@@ -80,7 +59,7 @@ impl AttrsRep {
                 _ => None,
             },
 
-            AttrsRep::Im(map) => map.get(key),
+            AttrsRep::Map(map) => map.get(key),
         }
     }
 
@@ -88,18 +67,18 @@ impl AttrsRep {
         match self {
             AttrsRep::Empty => false,
             AttrsRep::KV { .. } => key == "name" || key == "value",
-            AttrsRep::Im(map) => map.contains_key(key),
+            AttrsRep::Map(map) => map.contains_key(key),
         }
     }
 }
 
 #[repr(transparent)]
 #[derive(Clone, Debug, Default)]
-pub struct NixAttrs(pub(super) AttrsRep);
+pub struct NixAttrs(pub(super) Rc<AttrsRep>);
 
-impl From<OrdMap<NixString, Value>> for NixAttrs {
-    fn from(map: OrdMap<NixString, Value>) -> Self {
-        NixAttrs(AttrsRep::Im(map))
+impl From<AttrsRep> for NixAttrs {
+    fn from(rep: AttrsRep) -> Self {
+        NixAttrs(Rc::new(rep))
     }
 }
 
@@ -112,34 +91,41 @@ where
     where
         T: IntoIterator<Item = (K, V)>,
     {
-        NixAttrs(AttrsRep::Im(iter.into_iter().collect()))
+        AttrsRep::Map(
+            iter.into_iter()
+                .map(|(k, v)| (k.into(), v.into()))
+                .collect(),
+        )
+        .into()
+    }
+}
+
+impl From<BTreeMap<NixString, Value>> for NixAttrs {
+    fn from(map: BTreeMap<NixString, Value>) -> Self {
+        AttrsRep::Map(map).into()
     }
 }
 
 impl TotalDisplay for NixAttrs {
     fn total_fmt(&self, f: &mut std::fmt::Formatter<'_>, set: &mut ThunkSet) -> std::fmt::Result {
-        f.write_str("{ ")?;
-
-        match &self.0 {
-            AttrsRep::KV { name, value } => {
-                f.write_str("name = ")?;
-                name.total_fmt(f, set)?;
-                f.write_str("; ")?;
-
-                f.write_str("value = ")?;
-                value.total_fmt(f, set)?;
-                f.write_str("; ")?;
-            }
-
-            AttrsRep::Im(map) => {
-                for (name, value) in map {
-                    write!(f, "{} = ", name.ident_str())?;
-                    value.total_fmt(f, set)?;
-                    f.write_str("; ")?;
+        if let Some(Value::String(s)) = self.select("type") {
+            if *s == "derivation" {
+                write!(f, "ยซderivation ")?;
+                if let Some(p) = self.select("drvPath") {
+                    p.total_fmt(f, set)?;
+                } else {
+                    write!(f, "???")?;
                 }
+                return write!(f, "ยป");
             }
+        }
+
+        f.write_str("{ ")?;
 
-            AttrsRep::Empty => { /* no values to print! */ }
+        for (name, value) in self.iter_sorted() {
+            write!(f, "{} = ", name.ident_str())?;
+            value.total_fmt(f, set)?;
+            f.write_str("; ")?;
         }
 
         f.write_str("}")
@@ -183,25 +169,22 @@ impl<'de> Deserialize<'de> for NixAttrs {
 
 impl NixAttrs {
     pub fn empty() -> Self {
-        Self(AttrsRep::Empty)
+        AttrsRep::Empty.into()
     }
 
     /// Compare two attribute sets by pointer equality. Only makes
-    /// sense for some attribute set reprsentations, i.e. returning
+    /// sense for some attribute set representations, i.e. returning
     /// `false` does not mean that the two attribute sets do not have
     /// equal *content*.
     pub fn ptr_eq(&self, other: &Self) -> bool {
-        match (&self.0, &other.0) {
-            (AttrsRep::Im(lhs), AttrsRep::Im(rhs)) => lhs.ptr_eq(rhs),
-            _ => false,
-        }
+        Rc::ptr_eq(&self.0, &other.0)
     }
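
Pointer equality now behaves uniformly across representations because every NixAttrs shares its AttrsRep behind an Rc. A generic sketch of what the check does and does not guarantee:

    use std::rc::Rc;

    let a = Rc::new(vec![1, 2, 3]);
    let b = Rc::clone(&a); // same allocation: pointer-equal
    let c = Rc::new(vec![1, 2, 3]); // equal contents, different allocation

    assert!(Rc::ptr_eq(&a, &b));
    assert!(!Rc::ptr_eq(&a, &c)); // false says nothing about content equality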
 
     /// Return an attribute set containing the merge of the two
     /// provided sets. Keys from the `other` set have precedence.
     pub fn update(self, other: Self) -> Self {
         // Short-circuit on some optimal cases:
-        match (&self.0, &other.0) {
+        match (self.0.as_ref(), other.0.as_ref()) {
             (AttrsRep::Empty, AttrsRep::Empty) => return self,
             (AttrsRep::Empty, _) => return other,
             (_, AttrsRep::Empty) => return self,
@@ -210,41 +193,44 @@ impl NixAttrs {
             // Explicitly handle all branches instead of falling
             // through, to ensure that we get at least some compiler
             // errors if variants are modified.
-            (AttrsRep::Im(_), AttrsRep::Im(_))
-            | (AttrsRep::Im(_), AttrsRep::KV { .. })
-            | (AttrsRep::KV { .. }, AttrsRep::Im(_)) => {}
+            (AttrsRep::Map(_), AttrsRep::Map(_))
+            | (AttrsRep::Map(_), AttrsRep::KV { .. })
+            | (AttrsRep::KV { .. }, AttrsRep::Map(_)) => {}
         };
 
         // Slightly more advanced, but still optimised updates
-        match (self.0, other.0) {
-            (AttrsRep::Im(mut m), AttrsRep::KV { name, value }) => {
+        match (Rc::unwrap_or_clone(self.0), Rc::unwrap_or_clone(other.0)) {
+            (AttrsRep::Map(mut m), AttrsRep::KV { name, value }) => {
                 m.insert(NAME_S.clone(), name);
                 m.insert(VALUE_S.clone(), value);
-                NixAttrs(AttrsRep::Im(m))
+                AttrsRep::Map(m).into()
             }
 
-            (AttrsRep::KV { name, value }, AttrsRep::Im(mut m)) => {
+            (AttrsRep::KV { name, value }, AttrsRep::Map(mut m)) => {
                 match m.entry(NAME_S.clone()) {
-                    imbl::ordmap::Entry::Vacant(e) => {
+                    btree_map::Entry::Vacant(e) => {
                         e.insert(name);
                     }
 
-                    imbl::ordmap::Entry::Occupied(_) => { /* name from `m` has precedence */ }
+                    btree_map::Entry::Occupied(_) => { /* name from `m` has precedence */ }
                 };
 
                 match m.entry(VALUE_S.clone()) {
-                    imbl::ordmap::Entry::Vacant(e) => {
+                    btree_map::Entry::Vacant(e) => {
                         e.insert(value);
                     }
 
-                    imbl::ordmap::Entry::Occupied(_) => { /* value from `m` has precedence */ }
+                    btree_map::Entry::Occupied(_) => { /* value from `m` has precedence */ }
                 };
 
-                NixAttrs(AttrsRep::Im(m))
+                AttrsRep::Map(m).into()
             }
 
             // Plain merge of maps.
-            (AttrsRep::Im(m1), AttrsRep::Im(m2)) => NixAttrs(AttrsRep::Im(m2.union(m1))),
+            (AttrsRep::Map(mut m1), AttrsRep::Map(m2)) => {
+                m1.extend(m2);
+                AttrsRep::Map(m1).into()
+            }
 
             // Cases handled above by the borrowing match:
             _ => unreachable!(),
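
The plain-merge arm relies on BTreeMap::extend overwriting existing keys, giving entries from `other` precedence, which matches Nix's `//` update operator. A minimal check of that behaviour:

    use std::collections::BTreeMap;

    let mut m1 = BTreeMap::from([("a", 1), ("b", 2)]);
    let m2 = BTreeMap::from([("b", 20), ("c", 30)]);
    m1.extend(m2);

    assert_eq!(m1["b"], 20); // the right-hand set's value wins
    assert_eq!(m1.len(), 3);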
@@ -253,16 +239,16 @@ impl NixAttrs {
 
     /// Return the number of key-value entries in an attrset.
     pub fn len(&self) -> usize {
-        match &self.0 {
-            AttrsRep::Im(map) => map.len(),
+        match self.0.as_ref() {
+            AttrsRep::Map(map) => map.len(),
             AttrsRep::Empty => 0,
             AttrsRep::KV { .. } => 2,
         }
     }
 
     pub fn is_empty(&self) -> bool {
-        match &self.0 {
-            AttrsRep::Im(map) => map.is_empty(),
+        match self.0.as_ref() {
+            AttrsRep::Map(map) => map.is_empty(),
             AttrsRep::Empty => true,
             AttrsRep::KV { .. } => false,
         }
@@ -298,8 +284,8 @@ impl NixAttrs {
     /// Construct an iterator over all the key-value pairs in the attribute set.
     #[allow(clippy::needless_lifetimes)]
     pub fn iter<'a>(&'a self) -> Iter<KeyValue<'a>> {
-        Iter(match &self.0 {
-            AttrsRep::Im(map) => KeyValue::Im(map.iter()),
+        Iter(match &self.0.as_ref() {
+            AttrsRep::Map(map) => KeyValue::Map(map.iter()),
             AttrsRep::Empty => KeyValue::Empty,
 
             AttrsRep::KV {
@@ -327,10 +313,12 @@ impl NixAttrs {
 
     /// Construct an iterator over all the keys of the attribute set
     pub fn keys(&self) -> Keys {
-        Keys(match &self.0 {
+        Keys(match self.0.as_ref() {
             AttrsRep::Empty => KeysInner::Empty,
-            AttrsRep::Im(m) => KeysInner::Im(m.keys()),
             AttrsRep::KV { .. } => KeysInner::KV(IterKV::default()),
+
+            // TODO(tazjin): only sort when required, not always.
+            AttrsRep::Map(m) => KeysInner::Map(m.keys()),
         })
     }
 
@@ -349,7 +337,7 @@ impl NixAttrs {
 
         // Optimisation: Empty attribute set
         if count == 0 {
-            return Ok(Ok(NixAttrs(AttrsRep::Empty)));
+            return Ok(Ok(AttrsRep::Empty.into()));
         }
 
         // Optimisation: KV pattern
@@ -359,14 +347,14 @@ impl NixAttrs {
             }
         }
 
-        let mut attrs = NixAttrs(AttrsRep::Im(OrdMap::new()));
+        let mut attrs_map = BTreeMap::new();
 
         for _ in 0..count {
             let value = stack_slice.pop().unwrap();
             let key = stack_slice.pop().unwrap();
 
             match key {
-                Value::String(ks) => set_attr(&mut attrs, ks, value)?,
+                Value::String(ks) => set_attr(&mut attrs_map, ks, value)?,
 
                 Value::Null => {
                     // This is in fact valid, but leads to the value
@@ -381,13 +369,13 @@ impl NixAttrs {
             }
         }
 
-        Ok(Ok(attrs))
+        Ok(Ok(AttrsRep::Map(attrs_map).into()))
     }
 
     /// Construct an optimized "KV"-style attribute set given the value for the
     /// `"name"` key, and the value for the `"value"` key
     pub(crate) fn from_kv(name: Value, value: Value) -> Self {
-        NixAttrs(AttrsRep::KV { name, value })
+        AttrsRep::KV { name, value }.into()
     }
 }
 
@@ -396,12 +384,12 @@ impl IntoIterator for NixAttrs {
     type IntoIter = OwnedAttrsIterator;
 
     fn into_iter(self) -> Self::IntoIter {
-        match self.0 {
+        match Rc::unwrap_or_clone(self.0) {
             AttrsRep::Empty => OwnedAttrsIterator(IntoIterRepr::Empty),
             AttrsRep::KV { name, value } => OwnedAttrsIterator(IntoIterRepr::Finite(
                 vec![(NAME_REF.clone(), name), (VALUE_REF.clone(), value)].into_iter(),
             )),
-            AttrsRep::Im(map) => OwnedAttrsIterator(IntoIterRepr::Im(map.into_iter())),
+            AttrsRep::Map(map) => OwnedAttrsIterator(IntoIterRepr::Map(map.into_iter())),
         }
     }
 }
@@ -440,13 +428,17 @@ fn attempt_optimise_kv(slice: &mut [Value]) -> Option<NixAttrs> {
 
 /// Set an attribute on an in-construction attribute set, while
 /// checking against duplicate keys.
-fn set_attr(attrs: &mut NixAttrs, key: NixString, value: Value) -> Result<(), ErrorKind> {
-    match attrs.0.map_mut().entry(key) {
-        imbl::ordmap::Entry::Occupied(entry) => Err(ErrorKind::DuplicateAttrsKey {
+fn set_attr(
+    map: &mut BTreeMap<NixString, Value>,
+    key: NixString,
+    value: Value,
+) -> Result<(), ErrorKind> {
+    match map.entry(key) {
+        btree_map::Entry::Occupied(entry) => Err(ErrorKind::DuplicateAttrsKey {
             key: entry.key().to_string(),
         }),
 
-        imbl::ordmap::Entry::Vacant(entry) => {
+        btree_map::Entry::Vacant(entry) => {
             entry.insert(value);
             Ok(())
         }
@@ -484,7 +476,7 @@ pub enum KeyValue<'a> {
         at: IterKV,
     },
 
-    Im(imbl::ordmap::Iter<'a, NixString, Value>),
+    Map(btree_map::Iter<'a, NixString, Value>),
 }
 
 /// Iterator over a Nix attribute set.
@@ -498,7 +490,7 @@ impl<'a> Iterator for Iter<KeyValue<'a>> {
 
     fn next(&mut self) -> Option<Self::Item> {
         match &mut self.0 {
-            KeyValue::Im(inner) => inner.next(),
+            KeyValue::Map(inner) => inner.next(),
             KeyValue::Empty => None,
 
             KeyValue::KV { name, value, at } => match at {
@@ -523,7 +515,7 @@ impl<'a> ExactSizeIterator for Iter<KeyValue<'a>> {
         match &self.0 {
             KeyValue::Empty => 0,
             KeyValue::KV { .. } => 2,
-            KeyValue::Im(inner) => inner.len(),
+            KeyValue::Map(inner) => inner.len(),
         }
     }
 }
@@ -531,7 +523,7 @@ impl<'a> ExactSizeIterator for Iter<KeyValue<'a>> {
 enum KeysInner<'a> {
     Empty,
     KV(IterKV),
-    Im(imbl::ordmap::Keys<'a, NixString, Value>),
+    Map(btree_map::Keys<'a, NixString, Value>),
 }
 
 pub struct Keys<'a>(KeysInner<'a>);
@@ -551,7 +543,7 @@ impl<'a> Iterator for Keys<'a> {
                 Some(&VALUE_REF)
             }
             KeysInner::KV(IterKV::Done) => None,
-            KeysInner::Im(m) => m.next(),
+            KeysInner::Map(m) => m.next(),
         }
     }
 }
@@ -571,7 +563,7 @@ impl<'a> ExactSizeIterator for Keys<'a> {
         match &self.0 {
             KeysInner::Empty => 0,
             KeysInner::KV(_) => 2,
-            KeysInner::Im(m) => m.len(),
+            KeysInner::Map(m) => m.len(),
         }
     }
 }
@@ -580,7 +572,7 @@ impl<'a> ExactSizeIterator for Keys<'a> {
 pub enum IntoIterRepr {
     Empty,
     Finite(std::vec::IntoIter<(NixString, Value)>),
-    Im(imbl::ordmap::ConsumingIter<(NixString, Value)>),
+    Map(btree_map::IntoIter<NixString, Value>),
 }
 
 /// Wrapper type which hides the internal implementation details from
@@ -595,7 +587,7 @@ impl Iterator for OwnedAttrsIterator {
         match &mut self.0 {
             IntoIterRepr::Empty => None,
             IntoIterRepr::Finite(inner) => inner.next(),
-            IntoIterRepr::Im(inner) => inner.next(),
+            IntoIterRepr::Map(m) => m.next(),
         }
     }
 }
@@ -605,7 +597,7 @@ impl ExactSizeIterator for OwnedAttrsIterator {
         match &self.0 {
             IntoIterRepr::Empty => 0,
             IntoIterRepr::Finite(inner) => inner.len(),
-            IntoIterRepr::Im(inner) => inner.len(),
+            IntoIterRepr::Map(inner) => inner.len(),
         }
     }
 }
@@ -615,7 +607,7 @@ impl DoubleEndedIterator for OwnedAttrsIterator {
         match &mut self.0 {
             IntoIterRepr::Empty => None,
             IntoIterRepr::Finite(inner) => inner.next_back(),
-            IntoIterRepr::Im(inner) => inner.next_back(),
+            IntoIterRepr::Map(inner) => inner.next_back(),
         }
     }
 }
diff --git a/tvix/eval/src/value/attrs/tests.rs b/tvix/eval/src/value/attrs/tests.rs
index 534b78a00d10..b79f45a71b28 100644
--- a/tvix/eval/src/value/attrs/tests.rs
+++ b/tvix/eval/src/value/attrs/tests.rs
@@ -9,7 +9,7 @@ fn test_empty_attrs() {
         .unwrap();
 
     assert!(
-        matches!(attrs, NixAttrs(AttrsRep::Empty)),
+        matches!(attrs.0.as_ref(), AttrsRep::Empty),
         "empty attribute set should use optimised representation"
     );
 }
@@ -21,7 +21,7 @@ fn test_simple_attrs() {
         .unwrap();
 
     assert!(
-        matches!(attrs, NixAttrs(AttrsRep::Im(_))),
+        matches!(attrs.0.as_ref(), AttrsRep::Map(_)),
         "simple attribute set should use map representation",
     )
 }
@@ -45,8 +45,8 @@ fn test_kv_attrs() {
     .expect("constructing K/V pair attrs should succeed")
     .unwrap();
 
-    match kv_attrs {
-        NixAttrs(AttrsRep::KV { name, value })
+    match kv_attrs.0.as_ref() {
+        AttrsRep::KV { name, value }
             if name.to_str().unwrap() == meaning_val.to_str().unwrap()
                 || value.to_str().unwrap() == forty_two_val.to_str().unwrap() => {}
 
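
These test changes reflect that `NixAttrs` now wraps its representation in an `Rc`, so tests match on `attrs.0.as_ref()` instead of destructuring the tuple struct directly. A minimal sketch with stand-in types (not the real tvix definitions):

    use std::collections::BTreeMap;
    use std::rc::Rc;

    #[allow(dead_code)]
    enum AttrsRep {
        Empty,
        Map(BTreeMap<String, i64>),
    }

    struct NixAttrs(Rc<AttrsRep>);

    fn main() {
        let attrs = NixAttrs(Rc::new(AttrsRep::Empty));
        // Matching goes through as_ref() because the enum sits behind Rc now.
        assert!(matches!(attrs.0.as_ref(), AttrsRep::Empty));
    }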
diff --git a/tvix/eval/src/value/json.rs b/tvix/eval/src/value/json.rs
index c48e9c1f4e85..24a6bcaf6f21 100644
--- a/tvix/eval/src/value/json.rs
+++ b/tvix/eval/src/value/json.rs
@@ -47,8 +47,8 @@ impl Value {
 
                 for val in l.into_iter() {
                     match generators::request_to_json(co, val).await {
-                        Ok((v, mut ctx)) => {
-                            context = context.join(&mut ctx);
+                        Ok((v, ctx)) => {
+                            context.extend(ctx.into_iter());
                             out.push(v)
                         }
                         Err(cek) => return Ok(Err(cek)),
@@ -100,8 +100,8 @@ impl Value {
                     out.insert(
                         name.to_str()?.to_owned(),
                         match generators::request_to_json(co, value).await {
-                            Ok((v, mut ctx)) => {
-                                context = context.join(&mut ctx);
+                            Ok((v, ctx)) => {
+                                context.extend(ctx.into_iter());
                                 v
                             }
                             Err(cek) => return Ok(Err(cek)),
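
The switch from `context.join(&mut ctx)` to `context.extend(ctx.into_iter())` performs the same set union without needing mutable access to the source context. A toy illustration, with a plain `HashSet` newtype standing in for `NixContext`:

    use std::collections::HashSet;

    // Stand-in for NixContext: a newtype over a set of context elements.
    #[derive(Default)]
    struct Ctx(HashSet<String>);

    impl Ctx {
        fn extend<T: IntoIterator<Item = String>>(&mut self, iter: T) {
            self.0.extend(iter)
        }
    }

    fn main() {
        let mut context = Ctx::default();
        let other = HashSet::from(["a".to_string(), "b".to_string()]);
        // Consumes `other` and unions its elements into `context`,
        // exactly what `join` did, minus the `&mut` on the source.
        context.extend(other);
        assert_eq!(context.0.len(), 2);
    }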
diff --git a/tvix/eval/src/value/list.rs b/tvix/eval/src/value/list.rs
index 2b8b3de28d19..3e4b23a93f42 100644
--- a/tvix/eval/src/value/list.rs
+++ b/tvix/eval/src/value/list.rs
@@ -2,8 +2,6 @@
 use std::ops::Index;
 use std::rc::Rc;
 
-use imbl::{vector, Vector};
-
 use serde::Deserialize;
 
 use super::thunk::ThunkSet;
@@ -12,7 +10,7 @@ use super::Value;
 
 #[repr(transparent)]
 #[derive(Clone, Debug, Deserialize)]
-pub struct NixList(Rc<Vector<Value>>);
+pub struct NixList(Rc<Vec<Value>>);
 
 impl TotalDisplay for NixList {
     fn total_fmt(&self, f: &mut std::fmt::Formatter<'_>, set: &mut ThunkSet) -> std::fmt::Result {
@@ -27,8 +25,8 @@ impl TotalDisplay for NixList {
     }
 }
 
-impl From<Vector<Value>> for NixList {
-    fn from(vs: Vector<Value>) -> Self {
+impl From<Vec<Value>> for NixList {
+    fn from(vs: Vec<Value>) -> Self {
         Self(Rc::new(vs))
     }
 }
@@ -54,10 +52,10 @@ impl NixList {
             stack_slice.len(),
         );
 
-        NixList(Rc::new(Vector::from_iter(stack_slice)))
+        NixList(Rc::new(stack_slice))
     }
 
-    pub fn iter(&self) -> vector::Iter<Value> {
+    pub fn iter(&self) -> std::slice::Iter<Value> {
         self.0.iter()
     }
 
@@ -65,19 +63,14 @@ impl NixList {
         Rc::ptr_eq(&self.0, &other.0)
     }
 
-    pub fn into_inner(self) -> Vector<Value> {
+    pub fn into_inner(self) -> Vec<Value> {
         Rc::try_unwrap(self.0).unwrap_or_else(|rc| (*rc).clone())
     }
-
-    #[deprecated(note = "callers should avoid constructing from Vec")]
-    pub fn from_vec(vs: Vec<Value>) -> Self {
-        Self(Rc::new(Vector::from_iter(vs)))
-    }
 }
 
 impl IntoIterator for NixList {
     type Item = Value;
-    type IntoIter = imbl::vector::ConsumingIter<Value>;
+    type IntoIter = std::vec::IntoIter<Value>;
 
     fn into_iter(self) -> Self::IntoIter {
         self.into_inner().into_iter()
@@ -86,7 +79,7 @@ impl IntoIterator for NixList {
 
 impl<'a> IntoIterator for &'a NixList {
     type Item = &'a Value;
-    type IntoIter = imbl::vector::Iter<'a, Value>;
+    type IntoIter = std::slice::Iter<'a, Value>;
 
     fn into_iter(self) -> Self::IntoIter {
         self.0.iter()
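
`NixList` now wraps `Rc<Vec<Value>>`, and `into_inner` uses the unwrap-or-clone idiom to avoid copying when the list is uniquely owned. A self-contained sketch, with `i64` standing in for `Value`:

    use std::rc::Rc;

    fn into_inner(list: Rc<Vec<i64>>) -> Vec<i64> {
        // If this is the only handle, take the Vec out for free;
        // otherwise fall back to cloning the shared contents.
        Rc::try_unwrap(list).unwrap_or_else(|rc| (*rc).clone())
    }

    fn main() {
        let unique = Rc::new(vec![1, 2, 3]);
        assert_eq!(into_inner(unique), vec![1, 2, 3]); // moved, no clone

        let shared = Rc::new(vec![4, 5]);
        let _other = Rc::clone(&shared);
        assert_eq!(into_inner(shared), vec![4, 5]); // refcount > 1, cloned
    }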
diff --git a/tvix/eval/src/value/mod.rs b/tvix/eval/src/value/mod.rs
index c171c9a04eb8..2e78f20b49a0 100644
--- a/tvix/eval/src/value/mod.rs
+++ b/tvix/eval/src/value/mod.rs
@@ -7,6 +7,7 @@ use std::path::PathBuf;
 use std::rc::Rc;
 
 use bstr::{BString, ByteVec};
+use codemap::Span;
 use lexical_core::format::CXX_LITERAL;
 use serde::Deserialize;
 
@@ -23,7 +24,6 @@ mod thunk;
 
 use crate::errors::{CatchableErrorKind, ErrorKind};
 use crate::opcode::StackIdx;
-use crate::spans::LightSpan;
 use crate::vm::generators::{self, GenCo};
 use crate::AddContext;
 pub use attrs::NixAttrs;
@@ -84,7 +84,6 @@ pub enum Value {
     FinaliseRequest(bool),
 
     #[serde(skip)]
-    // TODO(tazjin): why is this in a Box?
     Catchable(Box<CatchableErrorKind>),
 }
 
@@ -187,6 +186,21 @@ pub struct CoercionKind {
     pub import_paths: bool,
 }
 
+impl From<CoercionKind> for u8 {
+    fn from(k: CoercionKind) -> u8 {
+        k.strong as u8 | (k.import_paths as u8) << 1
+    }
+}
+
+impl From<u8> for CoercionKind {
+    fn from(byte: u8) -> Self {
+        CoercionKind {
+            strong: byte & 0x01 != 0,
+            import_paths: byte & 0x02 != 0,
+        }
+    }
+}
+
 impl<T> From<T> for Value
 where
     T: Into<NixString>,
@@ -196,14 +210,6 @@ where
     }
 }
 
-/// Constructors
-impl Value {
-    /// Construct a [`Value::Attrs`] from a [`NixAttrs`].
-    pub fn attrs(attrs: NixAttrs) -> Self {
-        Self::Attrs(Box::new(attrs))
-    }
-}
-
 /// Controls what kind of by-pointer equality comparison is allowed.
 ///
 /// See `//tvix/docs/value-pointer-equality.md` for details.
@@ -220,11 +226,16 @@ pub enum PointerEquality {
 }
 
 impl Value {
+    /// Construct a [`Value::Attrs`] from a [`NixAttrs`].
+    pub fn attrs(attrs: NixAttrs) -> Self {
+        Self::Attrs(Box::new(attrs))
+    }
+
     /// Deeply forces a value, traversing e.g. lists and attribute sets and forcing
     /// their contents, too.
     ///
     /// This is a generator function.
-    pub(super) async fn deep_force(self, co: GenCo, span: LightSpan) -> Result<Value, ErrorKind> {
+    pub(super) async fn deep_force(self, co: GenCo, span: Span) -> Result<Value, ErrorKind> {
         if let Some(v) = Self::deep_force_(self.clone(), co, span).await? {
             Ok(v)
         } else {
@@ -233,11 +244,7 @@ impl Value {
     }
 
     /// Returns Some(v) or None to indicate the returned value is myself
-    async fn deep_force_(
-        myself: Value,
-        co: GenCo,
-        span: LightSpan,
-    ) -> Result<Option<Value>, ErrorKind> {
+    async fn deep_force_(myself: Value, co: GenCo, span: Span) -> Result<Option<Value>, ErrorKind> {
         // This is a stack of values which still remain to be forced.
         let mut vals = vec![myself];
 
@@ -256,7 +263,7 @@ impl Value {
                 if !thunk_set.insert(t) {
                     continue;
                 }
-                Thunk::force_(t.clone(), &co, span.clone()).await?
+                Thunk::force_(t.clone(), &co, span).await?
             } else {
                 v
             };
@@ -307,7 +314,7 @@ impl Value {
         self,
         co: GenCo,
         kind: CoercionKind,
-        span: LightSpan,
+        span: Span,
     ) -> Result<Value, ErrorKind> {
         self.coerce_to_string_(&co, kind, span).await
     }
@@ -318,7 +325,7 @@ impl Value {
         self,
         co: &GenCo,
         kind: CoercionKind,
-        span: LightSpan,
+        span: Span,
     ) -> Result<Value, ErrorKind> {
         let mut result = BString::default();
         let mut vals = vec![self];
@@ -331,15 +338,15 @@ impl Value {
 
         loop {
             let value = if let Some(v) = vals.pop() {
-                v.force(co, span.clone()).await?
+                v.force(co, span).await?
             } else {
                 return Ok(Value::String(NixString::new_context_from(context, result)));
             };
             let coerced: Result<BString, _> = match (value, kind) {
                 // coercions that are always done
                 (Value::String(mut s), _) => {
-                    if let Some(ctx) = s.context_mut() {
-                        context = context.join(ctx);
+                    if let Some(ctx) = s.take_context() {
+                        context.extend(ctx.into_iter());
                     }
                     Ok((*s).into())
                 }
@@ -377,7 +384,7 @@ impl Value {
                 // `__toString` is preferred.
                 (Value::Attrs(attrs), kind) => {
                     if let Some(to_string) = attrs.select("__toString") {
-                        let callable = to_string.clone().force(co, span.clone()).await?;
+                        let callable = to_string.clone().force(co, span).await?;
 
                         // Leave the attribute set on the stack as an argument
                         // to the function call.
@@ -467,7 +474,7 @@ impl Value {
         other: Value,
         co: GenCo,
         ptr_eq: PointerEquality,
-        span: LightSpan,
+        span: Span,
     ) -> Result<Value, ErrorKind> {
         self.nix_eq(other, &co, ptr_eq, span).await
     }
@@ -487,7 +494,7 @@ impl Value {
         other: Value,
         co: &GenCo,
         ptr_eq: PointerEquality,
-        span: LightSpan,
+        span: Span,
     ) -> Result<Value, ErrorKind> {
         // this is a stack of ((v1,v2),peq) triples to be compared;
         // after each triple is popped off of the stack, v1 is
@@ -513,13 +520,13 @@ impl Value {
                         }
                     };
 
-                    Thunk::force_(thunk, co, span.clone()).await?
+                    Thunk::force_(thunk, co, span).await?
                 }
 
                 _ => a,
             };
 
-            let b = b.force(co, span.clone()).await?;
+            let b = b.force(co, span).await?;
 
             debug_assert!(!matches!(a, Value::Thunk(_)));
             debug_assert!(!matches!(b, Value::Thunk(_)));
@@ -568,11 +575,11 @@ impl Value {
                     #[allow(clippy::single_match)] // might need more match arms later
                     match (a1.select("type"), a2.select("type")) {
                         (Some(v1), Some(v2)) => {
-                            let s1 = v1.clone().force(co, span.clone()).await?;
+                            let s1 = v1.clone().force(co, span).await?;
                             if s1.is_catchable() {
                                 return Ok(s1);
                             }
-                            let s2 = v2.clone().force(co, span.clone()).await?;
+                            let s2 = v2.clone().force(co, span).await?;
                             if s2.is_catchable() {
                                 return Ok(s2);
                             }
@@ -593,8 +600,8 @@ impl Value {
                                         .context("comparing derivations")?
                                         .clone();
 
-                                    let out1 = out1.clone().force(co, span.clone()).await?;
-                                    let out2 = out2.clone().force(co, span.clone()).await?;
+                                    let out1 = out1.clone().force(co, span).await?;
+                                    let out2 = out2.clone().force(co, span).await?;
 
                                     if out1.is_catchable() {
                                         return Ok(out1);
@@ -745,7 +752,7 @@ impl Value {
         self,
         other: Self,
         co: GenCo,
-        span: LightSpan,
+        span: Span,
     ) -> Result<Result<Ordering, CatchableErrorKind>, ErrorKind> {
         Self::nix_cmp_ordering_(self, other, co, span).await
     }
@@ -754,7 +761,7 @@ impl Value {
         myself: Self,
         other: Self,
         co: GenCo,
-        span: LightSpan,
+        span: Span,
     ) -> Result<Result<Ordering, CatchableErrorKind>, ErrorKind> {
         // this is a stack of ((v1,v2),peq) triples to be compared;
         // after each triple is popped off of the stack, v1 is
@@ -770,14 +777,14 @@ impl Value {
             };
             if ptr_eq == PointerEquality::AllowAll {
                 if a.clone()
-                    .nix_eq(b.clone(), &co, PointerEquality::AllowAll, span.clone())
+                    .nix_eq(b.clone(), &co, PointerEquality::AllowAll, span)
                     .await?
                     .as_bool()?
                 {
                     continue;
                 }
-                a = a.force(&co, span.clone()).await?;
-                b = b.force(&co, span.clone()).await?;
+                a = a.force(&co, span).await?;
+                b = b.force(&co, span).await?;
             }
             let result = match (a, b) {
                 (Value::Catchable(c), _) => return Ok(Err(*c)),
@@ -820,7 +827,7 @@ impl Value {
     }
 
     // TODO(amjoseph): de-asyncify this (when called directly by the VM)
-    pub async fn force(self, co: &GenCo, span: LightSpan) -> Result<Value, ErrorKind> {
+    pub async fn force(self, co: &GenCo, span: Span) -> Result<Value, ErrorKind> {
         if let Value::Thunk(thunk) = self {
             // TODO(amjoseph): use #[tailcall::mutual]
             return Thunk::force_(thunk, co, span).await;
@@ -829,7 +836,7 @@ impl Value {
     }
 
     // need two flavors, because async
-    pub async fn force_owned_genco(self, co: GenCo, span: LightSpan) -> Result<Value, ErrorKind> {
+    pub async fn force_owned_genco(self, co: GenCo, span: Span) -> Result<Value, ErrorKind> {
         if let Value::Thunk(thunk) = self {
             // TODO(amjoseph): use #[tailcall::mutual]
             return Thunk::force_(thunk, &co, span).await;
@@ -881,6 +888,12 @@ impl Value {
             | Value::FinaliseRequest(_) => "an internal Tvix evaluator value".into(),
         }
     }
+
+    /// Constructs a thunk that will be evaluated lazily at runtime. This lets
+    /// users of Tvix implement their own lazy builtins and so on.
+    pub fn suspended_native_thunk(native: Box<dyn Fn() -> Result<Value, ErrorKind>>) -> Self {
+        Value::Thunk(Thunk::new_suspended_native(native))
+    }
 }
 
 trait TotalDisplay {
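
The two `From` impls added in the hunk above pack the `CoercionKind` flags into the low two bits of a byte: `strong` in bit 0, `import_paths` in bit 1. A standalone copy of those impls plus a round-trip check over all four flag combinations:

    #[derive(Clone, Copy, Debug, PartialEq)]
    struct CoercionKind {
        strong: bool,
        import_paths: bool,
    }

    impl From<CoercionKind> for u8 {
        fn from(k: CoercionKind) -> u8 {
            k.strong as u8 | (k.import_paths as u8) << 1
        }
    }

    impl From<u8> for CoercionKind {
        fn from(byte: u8) -> Self {
            CoercionKind {
                strong: byte & 0x01 != 0,
                import_paths: byte & 0x02 != 0,
            }
        }
    }

    fn main() {
        for &strong in &[false, true] {
            for &import_paths in &[false, true] {
                let k = CoercionKind { strong, import_paths };
                // The packing is lossless: every combination survives.
                assert_eq!(k, CoercionKind::from(u8::from(k)));
            }
        }
    }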
diff --git a/tvix/eval/src/value/string/context.rs b/tvix/eval/src/value/string/context.rs
new file mode 100644
index 000000000000..e1c04735ddde
--- /dev/null
+++ b/tvix/eval/src/value/string/context.rs
@@ -0,0 +1,161 @@
+use rustc_hash::FxHashSet;
+use serde::Serialize;
+
+use super::NixString;
+
+#[derive(Clone, Debug, Serialize, Hash, PartialEq, Eq)]
+pub enum NixContextElement {
+    /// A plain store path (e.g. source files copied to the store)
+    Plain(String),
+
+    /// Single output of a derivation, represented by its name and its derivation path.
+    Single { name: String, derivation: String },
+
+    /// A reference to a complete derivation
+    /// including its source and its binary closure.
+    /// It is used for the `drvPath` attribute context.
+    /// The referred string is the store path to
+    /// the derivation path.
+    Derivation(String),
+}
+
+/// Nix context strings representation in Tvix. This tracks a set of different kinds of string
+/// dependencies that we can come across during manipulation of our language primitives, mostly
+/// strings. There is a simple algebra describing how contexts propagate through primitive
+/// operations, e.g. concatenation, interpolation and other string operations.
+#[repr(transparent)]
+#[derive(Clone, Debug, Serialize, Default)]
+pub struct NixContext(FxHashSet<NixContextElement>);
+
+impl From<NixContextElement> for NixContext {
+    fn from(value: NixContextElement) -> Self {
+        let mut set = FxHashSet::default();
+        set.insert(value);
+        Self(set)
+    }
+}
+
+impl From<FxHashSet<NixContextElement>> for NixContext {
+    fn from(value: FxHashSet<NixContextElement>) -> Self {
+        Self(value)
+    }
+}
+
+impl<const N: usize> From<[NixContextElement; N]> for NixContext {
+    fn from(value: [NixContextElement; N]) -> Self {
+        let mut set = FxHashSet::default();
+        for elt in value {
+            set.insert(elt);
+        }
+        Self(set)
+    }
+}
+
+impl NixContext {
+    /// Creates an empty context that can be populated
+    /// and passed to form a contextful [NixString]. Note that
+    /// if the context is concretely empty, the resulting [NixString]
+    /// will be contextless.
+    pub fn new() -> Self {
+        Self::default()
+    }
+
+    /// For internal consumers, we let people observe
+    /// whether the [NixContext] is actually empty,
+    /// to decide whether they want to skip the allocation
+    /// of a full-blown [HashSet].
+    pub(crate) fn is_empty(&self) -> bool {
+        self.0.is_empty()
+    }
+
+    /// Consumes a new [NixContextElement] and adds it if not already
+    /// present in this context.
+    pub fn append(mut self, other: NixContextElement) -> Self {
+        self.0.insert(other);
+        self
+    }
+
+    /// Extends the existing context with more context elements.
+    pub fn extend<T>(&mut self, iter: T)
+    where
+        T: IntoIterator<Item = NixContextElement>,
+    {
+        self.0.extend(iter)
+    }
+
+    /// Copies the context strings of another [NixString]
+    /// into this context.
+    pub fn mimic(&mut self, other: &NixString) {
+        if let Some(context) = other.context() {
+            self.extend(context.iter().cloned());
+        }
+    }
+
+    /// Iterates over "plain" context elements, e.g. sources imported
+    /// in the store without more information, i.e. `toFile` or coerced imported paths.
+    /// It yields paths to the store.
+    pub fn iter_plain(&self) -> impl Iterator<Item = &str> {
+        self.iter().filter_map(|elt| {
+            if let NixContextElement::Plain(s) = elt {
+                Some(s.as_str())
+            } else {
+                None
+            }
+        })
+    }
+
+    /// Iterates over "full derivations" context elements, e.g. something
+    /// referring to their `drvPath`, i.e. their full sources and binary closure.
+    /// It yields derivation paths.
+    pub fn iter_derivation(&self) -> impl Iterator<Item = &str> {
+        self.iter().filter_map(|elt| {
+            if let NixContextElement::Derivation(s) = elt {
+                Some(s.as_str())
+            } else {
+                None
+            }
+        })
+    }
+
+    /// Iterates over "single" context elements, e.g. single derived paths,
+    /// or also known as the single output of a given derivation.
+    /// The first element of the tuple is the output name
+    /// and the second element is the derivation path.
+    pub fn iter_single_outputs(&self) -> impl Iterator<Item = (&str, &str)> {
+        self.iter().filter_map(|elt| {
+            if let NixContextElement::Single { name, derivation } = elt {
+                Some((name.as_str(), derivation.as_str()))
+            } else {
+                None
+            }
+        })
+    }
+
+    /// Iterates over any element of the context.
+    pub fn iter(&self) -> impl Iterator<Item = &NixContextElement> {
+        self.0.iter()
+    }
+
+    /// Produces a list of owned path strings from this context,
+    /// regardless of the element kind.
+    pub fn to_owned_references(self) -> Vec<String> {
+        self.0
+            .into_iter()
+            .map(|ctx| match ctx {
+                NixContextElement::Derivation(drv_path) => drv_path,
+                NixContextElement::Plain(store_path) => store_path,
+                NixContextElement::Single { derivation, .. } => derivation,
+            })
+            .collect()
+    }
+}
+
+impl IntoIterator for NixContext {
+    type Item = NixContextElement;
+
+    type IntoIter = std::collections::hash_set::IntoIter<NixContextElement>;
+
+    fn into_iter(self) -> Self::IntoIter {
+        self.0.into_iter()
+    }
+}
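
A brief usage sketch of the `NixContext` API defined above. The `tvix_eval` import path and the store paths are hypothetical, used only for illustration:

    // Hypothetical import path; within the crate this would come from
    // the value::string module.
    use tvix_eval::{NixContext, NixContextElement};

    fn main() {
        // Build up a context from one plain source path...
        let mut ctx = NixContext::new()
            .append(NixContextElement::Plain("/nix/store/aaa-src".into()));

        // ...then union in a single-output element (made-up paths).
        ctx.extend([NixContextElement::Single {
            name: "out".into(),
            derivation: "/nix/store/bbb-foo.drv".into(),
        }]);

        assert_eq!(ctx.iter_plain().count(), 1);
        assert_eq!(ctx.iter_single_outputs().count(), 1);
    }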
diff --git a/tvix/eval/src/value/string.rs b/tvix/eval/src/value/string/mod.rs
index ceb43f1ea5ed..5bcb4786b283 100644
--- a/tvix/eval/src/value/string.rs
+++ b/tvix/eval/src/value/string/mod.rs
@@ -1,158 +1,30 @@
 //! This module implements Nix language strings.
 //!
-//! Nix language strings never need to be modified on the language
-//! level, allowing us to shave off some memory overhead and only
-//! paying the cost when creating new strings.
+//! See [`NixString`] for more information about the internals of string values
+
 use bstr::{BStr, BString, ByteSlice, Chars};
+use nohash_hasher::BuildNoHashHasher;
 use rnix::ast;
-use std::alloc::{alloc, dealloc, handle_alloc_error, Layout};
+#[cfg(feature = "no_leak")]
+use rustc_hash::FxHashSet;
+use rustc_hash::FxHasher;
+use std::alloc::dealloc;
+use std::alloc::{alloc, handle_alloc_error, Layout};
 use std::borrow::{Borrow, Cow};
-use std::collections::HashSet;
+use std::cell::RefCell;
 use std::ffi::c_void;
 use std::fmt::{self, Debug, Display};
-use std::hash::Hash;
+use std::hash::{Hash, Hasher};
 use std::ops::Deref;
 use std::ptr::{self, NonNull};
 use std::slice;
 
 use serde::de::{Deserializer, Visitor};
-use serde::{Deserialize, Serialize};
-
-#[derive(Clone, Debug, Serialize, Hash, PartialEq, Eq)]
-pub enum NixContextElement {
-    /// A plain store path (e.g. source files copied to the store)
-    Plain(String),
-
-    /// Single output of a derivation, represented by its name and its derivation path.
-    Single { name: String, derivation: String },
-
-    /// A reference to a complete derivation
-    /// including its source and its binary closure.
-    /// It is used for the `drvPath` attribute context.
-    /// The referred string is the store path to
-    /// the derivation path.
-    Derivation(String),
-}
-
-/// Nix context strings representation in Tvix. This tracks a set of different kinds of string
-/// dependencies that we can come across during manipulation of our language primitives, mostly
-/// strings. There's some simple algebra of context strings and how they propagate w.r.t. primitive
-/// operations, e.g. concatenation, interpolation and other string operations.
-#[repr(transparent)]
-#[derive(Clone, Debug, Serialize, Default)]
-pub struct NixContext(HashSet<NixContextElement>);
-
-impl From<NixContextElement> for NixContext {
-    fn from(value: NixContextElement) -> Self {
-        Self([value].into())
-    }
-}
+use serde::Deserialize;
 
-impl From<HashSet<NixContextElement>> for NixContext {
-    fn from(value: HashSet<NixContextElement>) -> Self {
-        Self(value)
-    }
-}
+mod context;
 
-impl NixContext {
-    /// Creates an empty context that can be populated
-    /// and passed to form a contextful [NixString], albeit
-    /// if the context is concretly empty, the resulting [NixString]
-    /// will be contextless.
-    pub fn new() -> Self {
-        Self::default()
-    }
-
-    /// For internal consumers, we let people observe
-    /// if the [NixContext] is actually empty or not
-    /// to decide whether they want to skip the allocation
-    /// of a full blown [HashSet].
-    pub(crate) fn is_empty(&self) -> bool {
-        self.0.is_empty()
-    }
-
-    /// Consumes a new [NixContextElement] and add it if not already
-    /// present in this context.
-    pub fn append(mut self, other: NixContextElement) -> Self {
-        self.0.insert(other);
-        self
-    }
-
-    /// Consumes both ends of the join into a new NixContent
-    /// containing the union of elements of both ends.
-    pub fn join(mut self, other: &mut NixContext) -> Self {
-        let other_set = std::mem::take(&mut other.0);
-        let mut set: HashSet<NixContextElement> = std::mem::take(&mut self.0);
-        set.extend(other_set);
-        Self(set)
-    }
-
-    /// Copies from another [NixString] its context strings
-    /// in this context.
-    pub fn mimic(&mut self, other: &NixString) {
-        if let Some(context) = other.context() {
-            self.0.extend(context.iter().cloned());
-        }
-    }
-
-    /// Iterates over "plain" context elements, e.g. sources imported
-    /// in the store without more information, i.e. `toFile` or coerced imported paths.
-    /// It yields paths to the store.
-    pub fn iter_plain(&self) -> impl Iterator<Item = &str> {
-        self.iter().filter_map(|elt| {
-            if let NixContextElement::Plain(s) = elt {
-                Some(s.as_str())
-            } else {
-                None
-            }
-        })
-    }
-
-    /// Iterates over "full derivations" context elements, e.g. something
-    /// referring to their `drvPath`, i.e. their full sources and binary closure.
-    /// It yields derivation paths.
-    pub fn iter_derivation(&self) -> impl Iterator<Item = &str> {
-        self.iter().filter_map(|elt| {
-            if let NixContextElement::Derivation(s) = elt {
-                Some(s.as_str())
-            } else {
-                None
-            }
-        })
-    }
-
-    /// Iterates over "single" context elements, e.g. single derived paths,
-    /// or also known as the single output of a given derivation.
-    /// The first element of the tuple is the output name
-    /// and the second element is the derivation path.
-    pub fn iter_single_outputs(&self) -> impl Iterator<Item = (&str, &str)> {
-        self.iter().filter_map(|elt| {
-            if let NixContextElement::Single { name, derivation } = elt {
-                Some((name.as_str(), derivation.as_str()))
-            } else {
-                None
-            }
-        })
-    }
-
-    /// Iterates over any element of the context.
-    pub fn iter(&self) -> impl Iterator<Item = &NixContextElement> {
-        self.0.iter()
-    }
-
-    /// Produces a list of owned references to this current context,
-    /// no matter its type.
-    pub fn to_owned_references(self) -> Vec<String> {
-        self.0
-            .into_iter()
-            .map(|ctx| match ctx {
-                NixContextElement::Derivation(drv_path) => drv_path,
-                NixContextElement::Plain(store_path) => store_path,
-                NixContextElement::Single { derivation, .. } => derivation,
-            })
-            .collect()
-    }
-}
+pub use context::{NixContext, NixContextElement};
 
 /// This type is never instantiated, but serves to document the memory layout of the actual heap
 /// allocation for Nix strings.
@@ -374,6 +246,59 @@ impl NixStringInner {
     }
 }
 
+#[derive(Default)]
+struct InternerInner {
+    #[allow(clippy::disallowed_types)] // Not using the default hasher
+    map: std::collections::HashMap<u64, NonNull<c_void>, BuildNoHashHasher<u64>>,
+    #[cfg(feature = "no_leak")]
+    #[allow(clippy::disallowed_types)] // Not using the default hasher
+    interned_strings: FxHashSet<NonNull<c_void>>,
+}
+
+unsafe impl Send for InternerInner {}
+
+fn hash<T>(s: T) -> u64
+where
+    T: Hash,
+{
+    let mut hasher = FxHasher::default();
+    s.hash(&mut hasher);
+    hasher.finish()
+}
+
+impl InternerInner {
+    pub fn intern(&mut self, s: &[u8]) -> NixString {
+        let hash = hash(s);
+        if let Some(s) = self.map.get(&hash) {
+            return NixString(*s);
+        }
+
+        let string = NixString::new_inner(s, None);
+        self.map.insert(hash, string.0);
+        #[cfg(feature = "no_leak")]
+        self.interned_strings.insert(string.0);
+        string
+    }
+}
+
+#[derive(Default)]
+struct Interner(RefCell<InternerInner>);
+
+impl Interner {
+    pub fn intern(&self, s: &[u8]) -> NixString {
+        self.0.borrow_mut().intern(s)
+    }
+
+    #[cfg(feature = "no_leak")]
+    pub fn is_interned_string(&self, string: &NixString) -> bool {
+        self.0.borrow().interned_strings.contains(&string.0)
+    }
+}
+
+thread_local! {
+    static INTERNER: Interner = Interner::default();
+}
+
 /// Nix string values
 ///
 /// # Internals
@@ -393,7 +318,23 @@ unsafe impl Send for NixString {}
 unsafe impl Sync for NixString {}
 
 impl Drop for NixString {
+    #[cfg(not(feature = "no_leak"))]
     fn drop(&mut self) {
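+        // Contextless strings are shared on clone (and possibly interned),
+        // so they are deliberately leaked; only context-carrying strings
+        // own their allocation and may be freed here.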
+        if self.context().is_some() {
+            // SAFETY: There's no way to construct a NixString that doesn't leave the allocation correct
+            // according to the rules of dealloc
+            unsafe {
+                NixStringInner::dealloc(self.0);
+            }
+        }
+    }
+
+    #[cfg(feature = "no_leak")]
+    fn drop(&mut self) {
+        if INTERNER.with(|i| i.is_interned_string(self)) {
+            return;
+        }
+
         // SAFETY: There's no way to construct a NixString that doesn't leave the allocation correct
         // according to the rules of dealloc
         unsafe {
@@ -404,9 +345,17 @@ impl Drop for NixString {
 
 impl Clone for NixString {
     fn clone(&self) -> Self {
-        // SAFETY: There's no way to construct a NixString that doesn't leave the allocation correct
-        // according to the rules of clone
-        unsafe { Self(NixStringInner::clone(self.0)) }
+        if cfg!(feature = "no_leak") || self.context().is_some() {
+            // SAFETY: There's no way to construct a NixString that doesn't leave the allocation correct
+            // according to the rules of clone
+            unsafe { Self(NixStringInner::clone(self.0)) }
+        } else {
+            // SAFETY:
+            //
+            // - NixStrings are never mutated
+            // - NixStrings are never freed
+            Self(self.0)
+        }
     }
 }
 
@@ -425,7 +374,7 @@ impl Debug for NixString {
 
 impl PartialEq for NixString {
     fn eq(&self, other: &Self) -> bool {
-        self.as_bstr() == other.as_bstr()
+        self.0 == other.0 || self.as_bstr() == other.as_bstr()
     }
 }
 
@@ -451,6 +400,9 @@ impl PartialOrd for NixString {
 
 impl Ord for NixString {
     fn cmp(&self, other: &Self) -> std::cmp::Ordering {
+        if self.0 == other.0 {
+            return std::cmp::Ordering::Equal;
+        }
         self.as_bstr().cmp(other.as_bstr())
     }
 }
@@ -614,8 +566,30 @@ mod arbitrary {
     }
 }
 
+/// Maximum length (in bytes) of strings that get interned.
+/// Set non-scientifically. TODO(aspen): think more about what this should be.
+const INTERN_THRESHOLD: usize = 32;
+
 impl NixString {
     fn new(contents: &[u8], context: Option<Box<NixContext>>) -> Self {
+        debug_assert!(
+            !context.as_deref().is_some_and(NixContext::is_empty),
+            "BUG: initialized with empty context"
+        );
+
+        if !cfg!(feature = "no_leak") /* It's only safe to intern if we leak strings, since there's
+                                       * nothing yet preventing interned strings from getting freed
+                                       * (and then used by other copies) otherwise
+                                       */
+            && contents.len() <= INTERN_THRESHOLD
+            && context.is_none()
+        {
+            return INTERNER.with(|i| i.intern(contents));
+        }
+
+        Self::new_inner(contents, context)
+    }
+
+    fn new_inner(contents: &[u8], context: Option<Box<NixContext>>) -> Self {
         // SAFETY: We're always fully initializing a NixString here:
         //
         // 1. NixStringInner::alloc sets up the len for us
@@ -707,8 +681,10 @@ impl NixString {
         let context = [self.context(), other.context()]
             .into_iter()
             .flatten()
-            .fold(NixContext::new(), |acc_ctx, new_ctx| {
-                acc_ctx.join(&mut new_ctx.clone())
+            .fold(NixContext::new(), |mut acc_ctx, new_ctx| {
+                // TODO: consume new_ctx?
+                acc_ctx.extend(new_ctx.iter().cloned());
+                acc_ctx
             });
         Self::new_context_from(context, s)
     }
@@ -719,33 +695,59 @@ impl NixString {
         //
         // Also, we're using the same lifetime and mutability as self, to fit the
         // pointer-to-reference conversion rules
-        unsafe { NixStringInner::context_ref(self.0).as_deref() }
+        let context = unsafe { NixStringInner::context_ref(self.0).as_deref() };
+
+        debug_assert!(
+            !context.is_some_and(NixContext::is_empty),
+            "BUG: empty context"
+        );
+
+        context
     }
 
-    pub(crate) fn context_mut(&mut self) -> Option<&mut NixContext> {
+    pub(crate) fn context_mut(&mut self) -> &mut Option<Box<NixContext>> {
         // SAFETY: There's no way to construct an uninitialized or invalid NixString (see the SAFETY
         // comment in `new`).
         //
         // Also, we're using the same lifetime and mutability as self, to fit the
         // pointer-to-reference conversion rules
-        unsafe { NixStringInner::context_mut(self.0).as_deref_mut() }
+        let context = unsafe { NixStringInner::context_mut(self.0) };
+
+        debug_assert!(
+            !context.as_deref().is_some_and(NixContext::is_empty),
+            "BUG: empty context"
+        );
+
+        context
     }
 
+    /// Iterates over all context elements.
+    /// See [Self::iter_ctx_plain], [Self::iter_ctx_derivation],
+    /// [Self::iter_ctx_single_outputs].
     pub fn iter_context(&self) -> impl Iterator<Item = &NixContext> {
         self.context().into_iter()
     }
 
-    pub fn iter_plain(&self) -> impl Iterator<Item = &str> {
+    /// Iterates over "plain" context elements, e.g. sources imported
+    /// in the store without more information, i.e. `toFile` or coerced imported paths.
+    /// It yields paths to the store.
+    pub fn iter_ctx_plain(&self) -> impl Iterator<Item = &str> {
         self.iter_context().flat_map(|context| context.iter_plain())
     }
 
-    pub fn iter_derivation(&self) -> impl Iterator<Item = &str> {
+    /// Iterates over "full derivations" context elements, e.g. something
+    /// referring to their `drvPath`, i.e. their full sources and binary closure.
+    /// It yields derivation paths.
+    pub fn iter_ctx_derivation(&self) -> impl Iterator<Item = &str> {
         return self
             .iter_context()
             .flat_map(|context| context.iter_derivation());
     }
 
-    pub fn iter_single_outputs(&self) -> impl Iterator<Item = (&str, &str)> {
+    /// Iterates over "single" context elements, e.g. single derived paths,
+    /// or also known as the single output of a given derivation.
+    /// The first element of the tuple is the output name
+    /// and the second element is the derivation path.
+    pub fn iter_ctx_single_outputs(&self) -> impl Iterator<Item = (&str, &str)> {
         return self
             .iter_context()
             .flat_map(|context| context.iter_single_outputs());
@@ -756,12 +758,16 @@ impl NixString {
         self.context().is_some()
     }
 
+    /// This clears the context of the string, returning
+    /// the removed dependency tracking information.
+    pub fn take_context(&mut self) -> Option<Box<NixContext>> {
+        self.context_mut().take()
+    }
+
     /// This clears the context of that string, losing
     /// all dependency tracking information.
     pub fn clear_context(&mut self) {
-        // SAFETY: There's no way to construct an uninitialized or invalid NixString (see the SAFETY
-        // comment in `new`).
-        *unsafe { NixStringInner::context_mut(self.0) } = None;
+        let _ = self.take_context();
     }
 
     pub fn chars(&self) -> Chars<'_> {
@@ -849,7 +855,7 @@ impl Display for NixString {
     }
 }
 
-#[cfg(test)]
+#[cfg(all(test, feature = "arbitrary"))]
 mod tests {
     use test_strategy::proptest;
 
diff --git a/tvix/eval/src/value/thunk.rs b/tvix/eval/src/value/thunk.rs
index a67537f945a9..4b915019d47c 100644
--- a/tvix/eval/src/value/thunk.rs
+++ b/tvix/eval/src/value/thunk.rs
@@ -18,17 +18,16 @@
 //! object, but when forcing a thunk, the runtime *must* mutate the
 //! memoisable slot.
 
+use rustc_hash::FxHashSet;
 use std::{
     cell::{Ref, RefCell, RefMut},
-    collections::HashSet,
     fmt::Debug,
     rc::Rc,
 };
 
 use crate::{
     errors::ErrorKind,
-    opcode::OpCode,
-    spans::LightSpan,
+    opcode::Op,
     upvalues::Upvalues,
     value::Closure,
     vm::generators::{self, GenCo},
@@ -59,7 +58,7 @@ enum ThunkRepr {
     Suspended {
         lambda: Rc<Lambda>,
         upvalues: Rc<Upvalues>,
-        light_span: LightSpan,
+        span: Span,
     },
 
     /// Thunk is a suspended native computation.
@@ -69,10 +68,10 @@ enum ThunkRepr {
     /// value means that infinite recursion has occurred.
     Blackhole {
         /// Span at which the thunk was first forced.
-        forced_at: LightSpan,
+        forced_at: Span,
 
         /// Span at which the thunk was originally suspended.
-        suspended_at: Option<LightSpan>,
+        suspended_at: Option<Span>,
 
         /// Span of the first instruction of the actual code inside
         /// the thunk.
@@ -143,11 +142,11 @@ impl Thunk {
         )))))
     }
 
-    pub fn new_suspended(lambda: Rc<Lambda>, light_span: LightSpan) -> Self {
+    pub fn new_suspended(lambda: Rc<Lambda>, span: Span) -> Self {
         Thunk(Rc::new(RefCell::new(ThunkRepr::Suspended {
             upvalues: Rc::new(Upvalues::with_capacity(lambda.upvalue_count)),
             lambda: lambda.clone(),
-            light_span,
+            span,
         })))
     }
 
@@ -162,9 +161,8 @@ impl Thunk {
     /// particularly useful in builtin implementations if the result of calling
     /// a function does not need to be forced immediately, because e.g. it is
     /// stored in an attribute set.
-    pub fn new_suspended_call(callee: Value, arg: Value, light_span: LightSpan) -> Self {
+    pub fn new_suspended_call(callee: Value, arg: Value, span: Span) -> Self {
         let mut lambda = Lambda::default();
-        let span = light_span.span();
 
         let arg_idx = lambda.chunk().push_constant(arg);
         let f_idx = lambda.chunk().push_constant(callee);
@@ -172,28 +170,28 @@ impl Thunk {
         // This is basically a recreation of compile_apply():
         // We need to push the argument onto the stack and then the function.
         // The function (not the argument) needs to be forced before calling.
-        lambda.chunk.push_op(OpCode::OpConstant(arg_idx), span);
-        lambda.chunk().push_op(OpCode::OpConstant(f_idx), span);
-        lambda.chunk.push_op(OpCode::OpForce, span);
-        lambda.chunk.push_op(OpCode::OpCall, span);
+        lambda.chunk.push_op(Op::Constant, span);
+        lambda.chunk.push_uvarint(arg_idx.0 as u64);
+        lambda.chunk.push_op(Op::Constant, span);
+        lambda.chunk.push_uvarint(f_idx.0 as u64);
+        lambda.chunk.push_op(Op::Force, span);
+        lambda.chunk.push_op(Op::Call, span);
 
         // Inform the VM that the chunk has ended
-        lambda.chunk.push_op(OpCode::OpReturn, span);
+        lambda.chunk.push_op(Op::Return, span);
 
         Thunk(Rc::new(RefCell::new(ThunkRepr::Suspended {
             upvalues: Rc::new(Upvalues::with_capacity(0)),
             lambda: Rc::new(lambda),
-            light_span,
+            span,
         })))
     }
 
-    fn prepare_blackhole(&self, forced_at: LightSpan) -> ThunkRepr {
+    fn prepare_blackhole(&self, forced_at: Span) -> ThunkRepr {
         match &*self.0.borrow() {
-            ThunkRepr::Suspended {
-                light_span, lambda, ..
-            } => ThunkRepr::Blackhole {
+            ThunkRepr::Suspended { span, lambda, .. } => ThunkRepr::Blackhole {
                 forced_at,
-                suspended_at: Some(light_span.clone()),
+                suspended_at: Some(*span),
                 content_span: Some(lambda.chunk.first_span()),
             },
 
@@ -205,14 +203,10 @@ impl Thunk {
         }
     }
 
-    pub async fn force(myself: Thunk, co: GenCo, span: LightSpan) -> Result<Value, ErrorKind> {
+    pub async fn force(myself: Thunk, co: GenCo, span: Span) -> Result<Value, ErrorKind> {
         Self::force_(myself, &co, span).await
     }
-    pub async fn force_(
-        mut myself: Thunk,
-        co: &GenCo,
-        span: LightSpan,
-    ) -> Result<Value, ErrorKind> {
+    pub async fn force_(mut myself: Thunk, co: &GenCo, span: Span) -> Result<Value, ErrorKind> {
         // This vector of "thunks which point to the thunk-being-forced", to
         // be updated along with it, is necessary in order to write this
         // function in iterative (and later, mutual-tail-call) form.
@@ -232,7 +226,7 @@ impl Thunk {
             // Begin evaluation of this thunk by marking it as a blackhole, meaning
             // that any other forcing frame encountering this thunk before its
             // evaluation is completed detects an evaluation cycle.
-            let inner = myself.0.replace(myself.prepare_blackhole(span.clone()));
+            let inner = myself.0.replace(myself.prepare_blackhole(span));
 
             match inner {
                 // If there was already a blackhole in the thunk, this is an
@@ -243,8 +237,8 @@ impl Thunk {
                     content_span,
                 } => {
                     return Err(ErrorKind::InfiniteRecursion {
-                        first_force: forced_at.span(),
-                        suspended_at: suspended_at.map(|s| s.span()),
+                        first_force: forced_at,
+                        suspended_at,
                         content_span,
                     })
                 }
@@ -262,13 +256,12 @@ impl Thunk {
                 ThunkRepr::Suspended {
                     lambda,
                     upvalues,
-                    light_span,
+                    span,
                 } => {
                     // TODO(amjoseph): use #[tailcall::mutual] here.  This can
                     // be turned into a tailcall to vm::execute_bytecode() by
                     // passing `also_update` to it.
-                    let value =
-                        generators::request_enter_lambda(co, lambda, upvalues, light_span).await;
+                    let value = generators::request_enter_lambda(co, lambda, upvalues, span).await;
                     myself.0.replace(ThunkRepr::Evaluated(value));
                     continue;
                 }
@@ -422,7 +415,7 @@ impl TotalDisplay for Thunk {
 /// The inner `HashSet` is not available on the outside, as it would be
 /// potentially unsafe to interact with the pointers in the set.
 #[derive(Default)]
-pub struct ThunkSet(HashSet<*const ThunkRepr>);
+pub struct ThunkSet(FxHashSet<*const ThunkRepr>);
 
 impl ThunkSet {
     /// Check whether the given thunk has already been seen. Will mark the thunk
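
The `push_uvarint`/`read_uvarint` calls above imply variable-width operands following each opcode byte. A minimal encoder/decoder of that kind, assuming a standard LEB128-style encoding (the actual `Chunk` implementation may differ in details):

    fn push_uvarint(code: &mut Vec<u8>, mut n: u64) {
        loop {
            let mut byte = (n & 0x7f) as u8;
            n >>= 7;
            if n != 0 {
                byte |= 0x80; // continuation bit: more bytes follow
            }
            code.push(byte);
            if n == 0 {
                break;
            }
        }
    }

    fn read_uvarint(code: &[u8], ip: usize) -> (u64, usize) {
        let (mut result, mut shift, mut size) = (0u64, 0u32, 0usize);
        loop {
            let byte = code[ip + size];
            result |= u64::from(byte & 0x7f) << shift;
            size += 1;
            if byte & 0x80 == 0 {
                // Return the decoded value and the byte count consumed,
                // which the caller adds to the instruction pointer.
                return (result, size);
            }
            shift += 7;
        }
    }

    fn main() {
        let mut code = Vec::new();
        push_uvarint(&mut code, 300);
        assert_eq!(read_uvarint(&code, 0), (300, 2));
    }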
diff --git a/tvix/eval/src/vm/generators.rs b/tvix/eval/src/vm/generators.rs
index 79de6886923a..ae9a8dd6ab51 100644
--- a/tvix/eval/src/vm/generators.rs
+++ b/tvix/eval/src/vm/generators.rs
@@ -81,7 +81,7 @@ pub enum VMRequest {
     EnterLambda {
         lambda: Rc<Lambda>,
         upvalues: Rc<Upvalues>,
-        light_span: LightSpan,
+        span: Span,
     },
 
     /// Emit a runtime warning (already containing a span) through the VM.
@@ -121,6 +121,9 @@ pub enum VMRequest {
     /// Request serialisation of a value to JSON, according to the
     /// slightly odd Nix evaluation rules.
     ToJson(Value),
+
+    /// Request the file type of the given path from the VM.
+    ReadFileType(PathBuf),
 }
 
 /// Human-readable representation of a generator message, used by observers.
@@ -178,6 +181,7 @@ impl Display for VMRequest {
             VMRequest::Span => write!(f, "span"),
             VMRequest::TryForce(v) => write!(f, "try_force({})", v.type_of()),
             VMRequest::ToJson(v) => write!(f, "to_json({})", v.type_of()),
+            VMRequest::ReadFileType(p) => write!(f, "read_file_type({})", p.to_string_lossy()),
         }
     }
 }
@@ -198,10 +202,12 @@ pub enum VMResponse {
     Directory(Vec<(bytes::Bytes, FileType)>),
 
     /// VM response with a span to use at the current point.
-    Span(LightSpan),
+    Span(Span),
 
     /// [std::io::Read] implementor produced by the VM in response to some IO operation.
     Reader(Box<dyn std::io::Read>),
+
+    FileType(FileType),
 }
 
 impl Display for VMResponse {
@@ -213,6 +219,7 @@ impl Display for VMResponse {
             VMResponse::Directory(d) => write!(f, "dir(len = {})", d.len()),
             VMResponse::Span(_) => write!(f, "span"),
             VMResponse::Reader(_) => write!(f, "reader"),
+            VMResponse::FileType(t) => write!(f, "file_type({})", t),
         }
     }
 }
@@ -234,7 +241,7 @@ where
 {
     /// Helper function to re-enqueue the current generator while it
     /// is awaiting a value.
-    fn reenqueue_generator(&mut self, name: &'static str, span: LightSpan, generator: Generator) {
+    fn reenqueue_generator(&mut self, name: &'static str, span: Span, generator: Generator) {
         self.frames.push(Frame::Generator {
             name,
             generator,
@@ -244,7 +251,7 @@ where
     }
 
     /// Helper function to enqueue a new generator.
-    pub(super) fn enqueue_generator<F, G>(&mut self, name: &'static str, span: LightSpan, gen: G)
+    pub(super) fn enqueue_generator<F, G>(&mut self, name: &'static str, span: Span, gen: G)
     where
         F: Future<Output = Result<Value, ErrorKind>> + 'static,
         G: FnOnce(GenCo) -> F,
@@ -265,7 +272,7 @@ where
     pub(crate) fn run_generator(
         &mut self,
         name: &'static str,
-        span: LightSpan,
+        span: Span,
         frame_id: usize,
         state: GeneratorState,
         mut generator: Generator,
@@ -302,8 +309,8 @@ where
                         // this function prepares the frame stack and yields
                         // back to the outer VM loop.
                         VMRequest::ForceValue(value) => {
-                            self.reenqueue_generator(name, span.clone(), generator);
-                            self.enqueue_generator("force", span.clone(), |co| {
+                            self.reenqueue_generator(name, span, generator);
+                            self.enqueue_generator("force", span, |co| {
                                 value.force_owned_genco(co, span)
                             });
                             return Ok(false);
@@ -311,8 +318,8 @@ where
 
                         // Generator has requested a deep-force.
                         VMRequest::DeepForceValue(value) => {
-                            self.reenqueue_generator(name, span.clone(), generator);
-                            self.enqueue_generator("deep_force", span.clone(), |co| {
+                            self.reenqueue_generator(name, span, generator);
+                            self.enqueue_generator("deep_force", span, |co| {
                                 value.deep_force(co, span)
                             });
                             return Ok(false);
@@ -322,10 +329,10 @@ where
                         // Logic is similar to `ForceValue`, except with the
                         // value being taken from that stack.
                         VMRequest::WithValue(idx) => {
-                            self.reenqueue_generator(name, span.clone(), generator);
+                            self.reenqueue_generator(name, span, generator);
 
                             let value = self.stack[self.with_stack[idx]].clone();
-                            self.enqueue_generator("force", span.clone(), |co| {
+                            self.enqueue_generator("force", span, |co| {
                                 value.force_owned_genco(co, span)
                             });
 
@@ -336,13 +343,13 @@ where
                         // with-stack. Logic is same as above, except for the
                         // value being from that stack.
                         VMRequest::CapturedWithValue(idx) => {
-                            self.reenqueue_generator(name, span.clone(), generator);
+                            self.reenqueue_generator(name, span, generator);
 
                             let call_frame = self.last_call_frame()
                                 .expect("Tvix bug: generator requested captured with-value, but there is no call frame");
 
                             let value = call_frame.upvalues.with_stack().unwrap()[idx].clone();
-                            self.enqueue_generator("force", span.clone(), |co| {
+                            self.enqueue_generator("force", span, |co| {
                                 value.force_owned_genco(co, span)
                             });
 
@@ -351,23 +358,23 @@ where
 
                         VMRequest::NixEquality(values, ptr_eq) => {
                             let values = *values;
-                            self.reenqueue_generator(name, span.clone(), generator);
-                            self.enqueue_generator("nix_eq", span.clone(), |co| {
+                            self.reenqueue_generator(name, span, generator);
+                            self.enqueue_generator("nix_eq", span, |co| {
                                 values.0.nix_eq_owned_genco(values.1, co, ptr_eq, span)
                             });
                             return Ok(false);
                         }
 
                         VMRequest::StringCoerce(val, kind) => {
-                            self.reenqueue_generator(name, span.clone(), generator);
-                            self.enqueue_generator("coerce_to_string", span.clone(), |co| {
+                            self.reenqueue_generator(name, span, generator);
+                            self.enqueue_generator("coerce_to_string", span, |co| {
                                 val.coerce_to_string(co, kind, span)
                             });
                             return Ok(false);
                         }
 
                         VMRequest::Call(callable) => {
-                            self.reenqueue_generator(name, span.clone(), generator);
+                            self.reenqueue_generator(name, span, generator);
                             self.call_value(span, None, callable)?;
                             return Ok(false);
                         }
@@ -375,12 +382,12 @@ where
                         VMRequest::EnterLambda {
                             lambda,
                             upvalues,
-                            light_span,
+                            span,
                         } => {
                             self.reenqueue_generator(name, span, generator);
 
                             self.frames.push(Frame::CallFrame {
-                                span: light_span,
+                                span,
                                 call_frame: CallFrame {
                                     lambda,
                                     upvalues,
@@ -424,7 +431,7 @@ where
                                     path: Some(path),
                                     error: e.into(),
                                 })
-                                .with_span(&span, self)?;
+                                .with_span(span, self)?;
 
                             message = VMResponse::Path(imported);
                         }
@@ -438,7 +445,7 @@ where
                                     path: Some(path),
                                     error: e.into(),
                                 })
-                                .with_span(&span, self)?;
+                                .with_span(span, self)?;
 
                             message = VMResponse::Reader(reader)
                         }
@@ -453,7 +460,7 @@ where
                                     error: e.into(),
                                 })
                                 .map(Value::Bool)
-                                .with_span(&span, self)?;
+                                .with_span(span, self)?;
 
                             message = VMResponse::Value(exists);
                         }
@@ -467,43 +474,57 @@ where
                                     path: Some(path),
                                     error: e.into(),
                                 })
-                                .with_span(&span, self)?;
+                                .with_span(span, self)?;
                             message = VMResponse::Directory(dir);
                         }
 
                         VMRequest::Span => {
-                            message = VMResponse::Span(self.reasonable_light_span());
+                            message = VMResponse::Span(self.reasonable_span);
                         }
 
                         VMRequest::TryForce(value) => {
                             self.try_eval_frames.push(frame_id);
-                            self.reenqueue_generator(name, span.clone(), generator);
+                            self.reenqueue_generator(name, span, generator);
 
                             debug_assert!(
                                 self.frames.len() == frame_id + 1,
                                 "generator should be reenqueued with the same frame ID"
                             );
 
-                            self.enqueue_generator("force", span.clone(), |co| {
+                            self.enqueue_generator("force", span, |co| {
                                 value.force_owned_genco(co, span)
                             });
                             return Ok(false);
                         }
 
                         VMRequest::ToJson(value) => {
-                            self.reenqueue_generator(name, span.clone(), generator);
+                            self.reenqueue_generator(name, span, generator);
                             self.enqueue_generator("to_json", span, |co| {
                                 value.into_contextful_json_generator(co)
                             });
                             return Ok(false);
                         }
+
+                        VMRequest::ReadFileType(path) => {
+                            let file_type = self
+                                .io_handle
+                                .as_ref()
+                                .file_type(&path)
+                                .map_err(|e| ErrorKind::IO {
+                                    path: Some(path),
+                                    error: e.into(),
+                                })
+                                .with_span(span, self)?;
+
+                            message = VMResponse::FileType(file_type);
+                        }
                     }
                 }
 
                 // Generator has completed, and its result value should
                 // be left on the stack.
                 genawaiter::GeneratorState::Complete(result) => {
-                    let value = result.with_span(&span, self)?;
+                    let value = result.with_span(span, self)?;
                     self.stack.push(value);
                     return Ok(true);
                 }
@@ -683,12 +704,12 @@ pub(crate) async fn request_enter_lambda(
     co: &GenCo,
     lambda: Rc<Lambda>,
     upvalues: Rc<Upvalues>,
-    light_span: LightSpan,
+    span: Span,
 ) -> Value {
     let msg = VMRequest::EnterLambda {
         lambda,
         upvalues,
-        light_span,
+        span,
     };
 
     match co.yield_(msg).await {
@@ -745,6 +766,7 @@ pub async fn request_open_file(co: &GenCo, path: PathBuf) -> Box<dyn std::io::Re
     }
 }
 
+#[cfg_attr(not(feature = "impure"), allow(unused))]
 pub(crate) async fn request_path_exists(co: &GenCo, path: PathBuf) -> Value {
     match co.yield_(VMRequest::PathExists(path)).await {
         VMResponse::Value(value) => value,
@@ -755,6 +777,7 @@ pub(crate) async fn request_path_exists(co: &GenCo, path: PathBuf) -> Value {
     }
 }
 
+#[cfg_attr(not(feature = "impure"), allow(unused))]
 pub(crate) async fn request_read_dir(co: &GenCo, path: PathBuf) -> Vec<(bytes::Bytes, FileType)> {
     match co.yield_(VMRequest::ReadDir(path)).await {
         VMResponse::Directory(dir) => dir,
@@ -765,7 +788,7 @@ pub(crate) async fn request_read_dir(co: &GenCo, path: PathBuf) -> Vec<(bytes::B
     }
 }
 
-pub(crate) async fn request_span(co: &GenCo) -> LightSpan {
+pub(crate) async fn request_span(co: &GenCo) -> Span {
     match co.yield_(VMRequest::Span).await {
         VMResponse::Span(span) => span,
         msg => panic!(
@@ -789,6 +812,17 @@ pub(crate) async fn request_to_json(
     }
 }
 
+#[cfg_attr(not(feature = "impure"), allow(unused))]
+pub(crate) async fn request_read_file_type(co: &GenCo, path: PathBuf) -> FileType {
+    match co.yield_(VMRequest::ReadFileType(path)).await {
+        VMResponse::FileType(file_type) => file_type,
+        msg => panic!(
+            "Tvix bug: VM responded with incorrect generator message: {}",
+            msg
+        ),
+    }
+}
+
 /// Call the given value as if it was an attribute set containing a functor. The
 /// arguments must already be prepared on the stack when a generator frame from
 /// this function is invoked.
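
Every helper in this file follows the same discipline: yield exactly one `VMRequest` variant and accept exactly one `VMResponse` variant, treating anything else as a VM bug. A toy synchronous stand-in for the genawaiter-based coroutine channel, showing the shape of the new `ReadFileType` round trip:

    use std::path::PathBuf;

    #[derive(Debug)]
    enum Request {
        ReadFileType(PathBuf),
    }

    #[derive(Debug)]
    #[allow(dead_code)]
    enum Response {
        FileType(String),
        Value(i64),
    }

    // Stand-in for the VM side of the channel; the real VM consults its
    // io_handle and wraps errors with the current span.
    fn vm_handle(req: Request) -> Response {
        match req {
            Request::ReadFileType(_) => Response::FileType("regular".into()),
        }
    }

    fn request_read_file_type(path: PathBuf) -> String {
        match vm_handle(Request::ReadFileType(path)) {
            Response::FileType(t) => t,
            msg => panic!(
                "Tvix bug: VM responded with incorrect generator message: {:?}",
                msg
            ),
        }
    }

    fn main() {
        assert_eq!(request_read_file_type("/etc/hosts".into()), "regular");
    }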
diff --git a/tvix/eval/src/vm/macros.rs b/tvix/eval/src/vm/macros.rs
index d8a09706ab9c..f9c084d41f91 100644
--- a/tvix/eval/src/vm/macros.rs
+++ b/tvix/eval/src/vm/macros.rs
@@ -49,7 +49,7 @@ macro_rules! cmp_op {
                     }
                 }
 
-                let gen_span = $frame.current_light_span();
+                let gen_span = $frame.current_span();
                 $vm.push_call_frame($span, $frame);
                 $vm.enqueue_generator("compare", gen_span, |co| compare(a, b, co));
                 return Ok(false);
diff --git a/tvix/eval/src/vm/mod.rs b/tvix/eval/src/vm/mod.rs
index 5c244cc3ca97..49e9fc5864be 100644
--- a/tvix/eval/src/vm/mod.rs
+++ b/tvix/eval/src/vm/mod.rs
@@ -14,8 +14,9 @@ mod macros;
 
 use bstr::{BString, ByteSlice, ByteVec};
 use codemap::Span;
+use rustc_hash::FxHashMap;
 use serde_json::json;
-use std::{cmp::Ordering, collections::HashMap, ops::DerefMut, path::PathBuf, rc::Rc};
+use std::{cmp::Ordering, ops::DerefMut, path::PathBuf, rc::Rc};
 
 use crate::{
     arithmetic_op,
@@ -27,8 +28,7 @@ use crate::{
     lifted_pop,
     nix_search_path::NixSearchPath,
     observer::RuntimeObserver,
-    opcode::{CodeIdx, Count, JumpOffset, OpCode, StackIdx, UpvalueIdx},
-    spans::LightSpan,
+    opcode::{CodeIdx, Op, Position, UpvalueIdx},
     upvalues::Upvalues,
     value::{
         Builtin, BuiltinResult, Closure, CoercionKind, Lambda, NixAttrs, NixContext, NixList,
@@ -51,7 +51,7 @@ trait GetSpan {
 
 impl<'o, IO> GetSpan for &VM<'o, IO> {
     fn get_span(self) -> Span {
-        self.reasonable_span.span()
+        self.reasonable_span
     }
 }
 
@@ -61,9 +61,9 @@ impl GetSpan for &CallFrame {
     }
 }
 
-impl GetSpan for &LightSpan {
+impl GetSpan for &Span {
     fn get_span(self) -> Span {
-        self.span()
+        *self
     }
 }
 
@@ -94,7 +94,7 @@ impl<T, S: GetSpan, IO> WithSpan<T, S, IO> for Result<T, ErrorKind> {
                         Frame::CallFrame { span, .. } => {
                             error = Error::new(
                                 ErrorKind::BytecodeError(Box::new(error)),
-                                span.span(),
+                                *span,
                                 vm.source.clone(),
                             );
                         }
@@ -104,7 +104,7 @@ impl<T, S: GetSpan, IO> WithSpan<T, S, IO> for Result<T, ErrorKind> {
                                     err: Box::new(error),
                                     gen_type: name,
                                 },
-                                span.span(),
+                                *span,
                                 vm.source.clone(),
                             );
                         }
@@ -146,10 +146,32 @@ impl CallFrame {
 
     /// Increment this frame's instruction pointer and return the operation that
     /// the pointer moved past.
-    fn inc_ip(&mut self) -> OpCode {
-        let op = self.chunk()[self.ip];
+    fn inc_ip(&mut self) -> Op {
+        debug_assert!(
+            self.ip.0 < self.chunk().code.len(),
+            "out of bounds code at IP {} in {:p}",
+            self.ip.0,
+            self.lambda
+        );
+
+        let op = self.chunk().code[self.ip.0];
         self.ip += 1;
-        op
+        op.into()
+    }
+
+    /// Read a varint-encoded operand and return it. The instruction
+    /// pointer is advanced past the bytes that were read.
+    fn read_uvarint(&mut self) -> u64 {
+        let (arg, size) = self.chunk().read_uvarint(self.ip.0);
+        self.ip += size;
+        arg
+    }
+
+    /// Read a fixed-size u16 operand and advance the instruction pointer past it.
+    fn read_u16(&mut self) -> u16 {
+        let arg = self.chunk().read_u16(self.ip.0);
+        self.ip += 2;
+        arg
     }
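`read_uvarint` delegates to the chunk, which now stores operands as variable-length integers so that small indices cost a single byte. A self-contained sketch of the decoding this assumes — standard LEB128, seven payload bits per byte, high bit set on every byte except the last; the real implementation lives on `Chunk`:

```rust
/// Decode one unsigned varint starting at `idx`, returning the value
/// and the number of bytes consumed. Sketch only.
fn read_uvarint(code: &[u8], idx: usize) -> (u64, usize) {
    let mut result: u64 = 0;
    let mut bytes_read = 0;
    loop {
        let byte = code[idx + bytes_read];
        // Low seven bits are payload, in little-endian group order.
        result |= u64::from(byte & 0x7f) << (7 * bytes_read);
        bytes_read += 1;
        // A cleared high bit marks the final byte.
        if byte & 0x80 == 0 {
            return (result, bytes_read);
        }
    }
}
```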
 
     /// Construct an error result from the given ErrorKind and the source span
@@ -163,13 +185,6 @@ impl CallFrame {
     pub fn current_span(&self) -> Span {
         self.chunk().get_span(self.ip - 1)
     }
-
-    /// Returns the information needed to calculate the current span,
-    /// but without performing that calculation.
-    // TODO: why pub?
-    pub(crate) fn current_light_span(&self) -> LightSpan {
-        LightSpan::new_actual(self.current_span())
-    }
 }
 
 /// A frame represents an execution state of the VM. The VM has a stack of
@@ -187,7 +202,7 @@ enum Frame {
         call_frame: CallFrame,
 
         /// Span from which the call frame was launched.
-        span: LightSpan,
+        span: Span,
     },
 
     /// Generator represents a frame that can yield further
@@ -201,7 +216,7 @@ enum Frame {
         name: &'static str,
 
         /// Span from which the generator was launched.
-        span: LightSpan,
+        span: Span,
 
         state: GeneratorState,
 
@@ -211,15 +226,15 @@ enum Frame {
 }
 
 impl Frame {
-    pub fn span(&self) -> LightSpan {
+    pub fn span(&self) -> Span {
         match self {
-            Frame::CallFrame { span, .. } | Frame::Generator { span, .. } => span.clone(),
+            Frame::CallFrame { span, .. } | Frame::Generator { span, .. } => *span,
         }
     }
 }
 
 #[derive(Default)]
-struct ImportCache(HashMap<PathBuf, Value>);
+struct ImportCache(FxHashMap<PathBuf, Value>);
 
 /// The `ImportCache` holds the `Value` resulting from `import`ing a certain
 /// file, so that the same file doesn't need to be re-evaluated multiple times.
@@ -309,7 +324,7 @@ struct VM<'o, IO> {
     ///
     /// The VM should update this whenever control flow changes take place (i.e.
     /// entering or exiting a frame to yield control somewhere).
-    reasonable_span: LightSpan,
+    reasonable_span: Span,
 
     /// This field is responsible for handling `builtins.tryEval`. When that
     /// builtin is encountered, it sends a special message to the VM which
@@ -343,7 +358,7 @@ where
         observer: &'o mut dyn RuntimeObserver,
         source: SourceCode,
         globals: Rc<GlobalsMap>,
-        reasonable_span: LightSpan,
+        reasonable_span: Span,
     ) -> Self {
         Self {
             nix_search_path,
@@ -362,7 +377,7 @@ where
     }
 
     /// Push a call frame onto the frame stack.
-    fn push_call_frame(&mut self, span: LightSpan, call_frame: CallFrame) {
+    fn push_call_frame(&mut self, span: Span, call_frame: CallFrame) {
         self.frames.push(Frame::CallFrame { span, call_frame })
     }
 
@@ -434,8 +449,8 @@ where
     ///    stack. In this case, the frame is not returned to the frame stack.
     ///
     /// 2. The code encounters a generator, in which case the frame in its
-    /// current state is pushed back on the stack, and the generator is left on
-    /// top of it for the outer loop to execute.
+    ///    current state is pushed back on the stack, and the generator is left
+    ///    on top of it for the outer loop to execute.
     ///
     /// 3. An error is encountered.
     ///
@@ -444,27 +459,35 @@ where
     ///
     /// The return value indicates whether the bytecode has been executed to
     /// completion, or whether it has been suspended in favour of a generator.
-    fn execute_bytecode(&mut self, span: LightSpan, mut frame: CallFrame) -> EvalResult<bool> {
+    fn execute_bytecode(&mut self, span: Span, mut frame: CallFrame) -> EvalResult<bool> {
         loop {
             let op = frame.inc_ip();
             self.observer.observe_execute_op(frame.ip, &op, &self.stack);
 
             match op {
-                OpCode::OpThunkSuspended(idx) | OpCode::OpThunkClosure(idx) => {
-                    let blueprint = match &frame.chunk()[idx] {
+                Op::ThunkSuspended | Op::ThunkClosure => {
+                    let idx = frame.read_uvarint() as usize;
+
+                    let blueprint = match &frame.chunk().constants[idx] {
                         Value::Blueprint(lambda) => lambda.clone(),
                         _ => panic!("compiler bug: non-blueprint in blueprint slot"),
                     };
 
-                    let upvalue_count = blueprint.upvalue_count;
-                    let thunk = if matches!(op, OpCode::OpThunkClosure(_)) {
+                    let upvalue_count = frame.read_uvarint();
+
+                    debug_assert!(
+                        (upvalue_count >> 1) == blueprint.upvalue_count as u64,
+                        "TODO: new upvalue count not correct",
+                    );
+
+                    let thunk = if op == Op::ThunkClosure {
                         debug_assert!(
-                            upvalue_count > 0,
-                            "OpThunkClosure should not be called for plain lambdas"
+                            (((upvalue_count >> 1) > 0) || (upvalue_count & 0b1 == 1)),
+                            "OpThunkClosure should not be called for plain lambdas",
                         );
                         Thunk::new_closure(blueprint)
                     } else {
-                        Thunk::new_suspended(blueprint, frame.current_light_span())
+                        Thunk::new_suspended(blueprint, frame.current_span())
                     };
                     let upvalues = thunk.upvalues_mut();
                     self.stack.push(Value::Thunk(thunk.clone()));
@@ -477,17 +500,17 @@ where
                     self.populate_upvalues(&mut frame, upvalue_count, upvalues)?;
                 }
 
-                OpCode::OpForce => {
+                Op::Force => {
                     if let Some(Value::Thunk(_)) = self.stack.last() {
                         let thunk = match self.stack_pop() {
                             Value::Thunk(t) => t,
                             _ => unreachable!(),
                         };
 
-                        let gen_span = frame.current_light_span();
+                        let gen_span = frame.current_span();
 
                         self.push_call_frame(span, frame);
-                        self.enqueue_generator("force", gen_span.clone(), |co| {
+                        self.enqueue_generator("force", gen_span, |co| {
                             Thunk::force(thunk, co, gen_span)
                         });
 
@@ -495,27 +518,37 @@ where
                     }
                 }
 
-                OpCode::OpGetUpvalue(upv_idx) => {
-                    let value = frame.upvalue(upv_idx).clone();
+                Op::GetUpvalue => {
+                    let idx = UpvalueIdx(frame.read_uvarint() as usize);
+                    let value = frame.upvalue(idx).clone();
                     self.stack.push(value);
                 }
 
                 // Discard the current frame.
-                OpCode::OpReturn => {
+                Op::Return => {
                     // TODO(amjoseph): I think this should assert `==` rather
                     // than `<=` but it fails with the stricter condition.
                     debug_assert!(self.stack.len() - 1 <= frame.stack_offset);
                     return Ok(true);
                 }
 
-                OpCode::OpConstant(idx) => {
-                    let c = frame.chunk()[idx].clone();
+                Op::Constant => {
+                    let idx = frame.read_uvarint() as usize;
+
+                    debug_assert!(
+                        idx < frame.chunk().constants.len(),
+                        "out of bounds constant at IP {} in {:p}",
+                        frame.ip.0,
+                        frame.lambda
+                    );
+
+                    let c = frame.chunk().constants[idx].clone();
                     self.stack.push(c);
                 }
 
-                OpCode::OpCall => {
+                Op::Call => {
                     let callable = self.stack_pop();
-                    self.call_value(frame.current_light_span(), Some((span, frame)), callable)?;
+                    self.call_value(frame.current_span(), Some((span, frame)), callable)?;
 
                     // exit this loop and let the outer loop enter the new call
                     return Ok(true);
@@ -523,7 +556,8 @@ where
 
                 // Remove the given number of elements from the stack,
                 // but retain the top value.
-                OpCode::OpCloseScope(Count(count)) => {
+                Op::CloseScope => {
+                    let count = frame.read_uvarint() as usize;
                     // Immediately move the top value into the right
                     // position.
                     let target_idx = self.stack.len() - 1 - count;
@@ -535,15 +569,22 @@ where
                     }
                 }
 
-                OpCode::OpClosure(idx) => {
-                    let blueprint = match &frame.chunk()[idx] {
+                Op::Closure => {
+                    let idx = frame.read_uvarint() as usize;
+                    let blueprint = match &frame.chunk().constants[idx] {
                         Value::Blueprint(lambda) => lambda.clone(),
                         _ => panic!("compiler bug: non-blueprint in blueprint slot"),
                     };
 
-                    let upvalue_count = blueprint.upvalue_count;
+                    let upvalue_count = frame.read_uvarint();
+
+                    debug_assert!(
+                        (upvalue_count >> 1) == blueprint.upvalue_count as u64,
+                        "TODO: new upvalue count not correct in closure",
+                    );
+
                     debug_assert!(
-                        upvalue_count > 0,
+                        ((upvalue_count >> 1) > 0 || (upvalue_count & 0b1 == 1)),
                         "OpClosure should not be called for plain lambdas"
                     );
 
@@ -556,7 +597,7 @@ where
                         ))));
                 }
 
-                OpCode::OpAttrsSelect => lifted_pop! {
+                Op::AttrsSelect => lifted_pop! {
                     self(key, attrs) => {
                         let key = key.to_str().with_span(&frame, self)?;
                         let attrs = attrs.to_attrs().with_span(&frame, self)?;
@@ -576,21 +617,24 @@ where
                     }
                 },
 
-                OpCode::OpJumpIfFalse(JumpOffset(offset)) => {
+                Op::JumpIfFalse => {
+                    let offset = frame.read_u16() as usize;
                     debug_assert!(offset != 0);
                     if !self.stack_peek(0).as_bool().with_span(&frame, self)? {
                         frame.ip += offset;
                     }
                 }
 
-                OpCode::OpJumpIfCatchable(JumpOffset(offset)) => {
+                Op::JumpIfCatchable => {
+                    let offset = frame.read_u16() as usize;
                     debug_assert!(offset != 0);
                     if self.stack_peek(0).is_catchable() {
                         frame.ip += offset;
                     }
                 }
 
-                OpCode::OpJumpIfNoFinaliseRequest(JumpOffset(offset)) => {
+                Op::JumpIfNoFinaliseRequest => {
+                    let offset = frame.read_u16() as usize;
                     debug_assert!(offset != 0);
                     match self.stack_peek(0) {
                         Value::FinaliseRequest(finalise) => {
@@ -602,11 +646,11 @@ where
                     }
                 }
 
-                OpCode::OpPop => {
+                Op::Pop => {
                     self.stack.pop();
                 }
 
-                OpCode::OpAttrsTrySelect => {
+                Op::AttrsTrySelect => {
                     let key = self.stack_pop().to_str().with_span(&frame, self)?;
                     let value = match self.stack_pop() {
                         Value::Attrs(attrs) => match attrs.select(&key) {
@@ -620,12 +664,14 @@ where
                     self.stack.push(value);
                 }
 
-                OpCode::OpGetLocal(StackIdx(local_idx)) => {
+                Op::GetLocal => {
+                    let local_idx = frame.read_uvarint() as usize;
                     let idx = frame.stack_offset + local_idx;
                     self.stack.push(self.stack[idx].clone());
                 }
 
-                OpCode::OpJumpIfNotFound(JumpOffset(offset)) => {
+                Op::JumpIfNotFound => {
+                    let offset = frame.read_u16() as usize;
                     debug_assert!(offset != 0);
                     if matches!(self.stack_peek(0), Value::AttrNotFound) {
                         self.stack_pop();
@@ -633,16 +679,17 @@ where
                     }
                 }
 
-                OpCode::OpJump(JumpOffset(offset)) => {
+                Op::Jump => {
+                    let offset = frame.read_u16() as usize;
                     debug_assert!(offset != 0);
                     frame.ip += offset;
                 }
 
-                OpCode::OpEqual => lifted_pop! {
+                Op::Equal => lifted_pop! {
                     self(b, a) => {
-                        let gen_span = frame.current_light_span();
+                        let gen_span = frame.current_span();
                         self.push_call_frame(span, frame);
-                        self.enqueue_generator("nix_eq", gen_span.clone(), |co| {
+                        self.enqueue_generator("nix_eq", gen_span, |co| {
                             a.nix_eq_owned_genco(b, co, PointerEquality::ForbidAll, gen_span)
                         });
                         return Ok(false);
@@ -653,7 +700,7 @@ where
                 // top is not of the expected type. This is necessary
                 // to implement some specific behaviours of Nix
                 // exactly.
-                OpCode::OpAssertBool => {
+                Op::AssertBool => {
                     let val = self.stack_peek(0);
                     // TODO(edef): propagate this into is_bool, since bottom values *are* values of any type
                     if !val.is_catchable() && !val.is_bool() {
@@ -667,7 +714,7 @@ where
                     }
                 }
 
-                OpCode::OpAssertAttrs => {
+                Op::AssertAttrs => {
                     let val = self.stack_peek(0);
                     // TODO(edef): propagate this into is_attrs, since bottom values *are* values of any type
                     if !val.is_catchable() && !val.is_attrs() {
@@ -681,9 +728,9 @@ where
                     }
                 }
 
-                OpCode::OpAttrs(Count(count)) => self.run_attrset(&frame, count)?,
+                Op::Attrs => self.run_attrset(frame.read_uvarint() as usize, &frame)?,
 
-                OpCode::OpAttrsUpdate => lifted_pop! {
+                Op::AttrsUpdate => lifted_pop! {
                     self(rhs, lhs) => {
                         let rhs = rhs.to_attrs().with_span(&frame, self)?;
                         let lhs = lhs.to_attrs().with_span(&frame, self)?;
@@ -691,28 +738,30 @@ where
                     }
                 },
 
-                OpCode::OpInvert => lifted_pop! {
+                Op::Invert => lifted_pop! {
                     self(v) => {
                         let v = v.as_bool().with_span(&frame, self)?;
                         self.stack.push(Value::Bool(!v));
                     }
                 },
 
-                OpCode::OpList(Count(count)) => {
+                Op::List => {
+                    let count = frame.read_uvarint() as usize;
                     let list =
                         NixList::construct(count, self.stack.split_off(self.stack.len() - count));
 
                     self.stack.push(Value::List(list));
                 }
 
-                OpCode::OpJumpIfTrue(JumpOffset(offset)) => {
+                Op::JumpIfTrue => {
+                    let offset = frame.read_u16() as usize;
                     debug_assert!(offset != 0);
                     if self.stack_peek(0).as_bool().with_span(&frame, self)? {
                         frame.ip += offset;
                     }
                 }
 
-                OpCode::OpHasAttr => lifted_pop! {
+                Op::HasAttr => lifted_pop! {
                     self(key, attrs) => {
                         let key = key.to_str().with_span(&frame, self)?;
                         let result = match attrs {
@@ -727,19 +776,20 @@ where
                     }
                 },
 
-                OpCode::OpConcat => lifted_pop! {
+                Op::Concat => lifted_pop! {
                     self(rhs, lhs) => {
                         let rhs = rhs.to_list().with_span(&frame, self)?.into_inner();
-                        let lhs = lhs.to_list().with_span(&frame, self)?.into_inner();
-                        self.stack.push(Value::List(NixList::from(lhs + rhs)))
+                        let mut lhs = lhs.to_list().with_span(&frame, self)?.into_inner();
+                        lhs.extend(rhs.into_iter());
+                        self.stack.push(Value::List(lhs.into()))
                     }
                 },
 
-                OpCode::OpResolveWith => {
+                Op::ResolveWith => {
                     let ident = self.stack_pop().to_str().with_span(&frame, self)?;
 
                     // Re-enqueue this frame.
-                    let op_span = frame.current_light_span();
+                    let op_span = frame.current_span();
                     self.push_call_frame(span, frame);
 
                     // Construct a generator frame doing the lookup in constant
@@ -762,27 +812,33 @@ where
                     return Ok(false);
                 }
 
-                OpCode::OpFinalise(StackIdx(idx)) => match &self.stack[frame.stack_offset + idx] {
-                    Value::Closure(_) => panic!("attempted to finalise a closure"),
-                    Value::Thunk(thunk) => thunk.finalise(&self.stack[frame.stack_offset..]),
-                    _ => panic!("attempted to finalise a non-thunk"),
-                },
+                Op::Finalise => {
+                    let idx = frame.read_uvarint() as usize;
+                    match &self.stack[frame.stack_offset + idx] {
+                        Value::Closure(_) => panic!("attempted to finalise a closure"),
+                        Value::Thunk(thunk) => thunk.finalise(&self.stack[frame.stack_offset..]),
+                        _ => panic!("attempted to finalise a non-thunk"),
+                    }
+                }
+
+                Op::CoerceToString => {
+                    let kind: CoercionKind = frame.chunk().code[frame.ip.0].into();
+                    frame.ip.0 += 1;
 
-                OpCode::OpCoerceToString(kind) => {
                     let value = self.stack_pop();
-                    let gen_span = frame.current_light_span();
+                    let gen_span = frame.current_span();
                     self.push_call_frame(span, frame);
 
-                    self.enqueue_generator("coerce_to_string", gen_span.clone(), |co| {
+                    self.enqueue_generator("coerce_to_string", gen_span, |co| {
                         value.coerce_to_string(co, kind, gen_span)
                     });
 
                     return Ok(false);
                 }
 
-                OpCode::OpInterpolate(Count(count)) => self.run_interpolate(&frame, count)?,
+                Op::Interpolate => self.run_interpolate(frame.read_uvarint(), &frame)?,
 
-                OpCode::OpValidateClosedFormals => {
+                Op::ValidateClosedFormals => {
                     let formals = frame.lambda.formals.as_ref().expect(
                         "OpValidateClosedFormals called within the frame of a lambda without formals",
                     );
@@ -797,7 +853,7 @@ where
                         if !formals.contains(arg) {
                             return frame.error(
                                 self,
-                                ErrorKind::UnexpectedArgument {
+                                ErrorKind::UnexpectedArgumentFormals {
                                     arg: arg.clone(),
                                     formals_span: formals.span,
                                 },
@@ -806,9 +862,9 @@ where
                     }
                 }
 
-                OpCode::OpAdd => lifted_pop! {
+                Op::Add => lifted_pop! {
                     self(b, a) => {
-                        let gen_span = frame.current_light_span();
+                        let gen_span = frame.current_span();
                         self.push_call_frame(span, frame);
 
                         // OpAdd can add not just numbers, but also string-like
@@ -819,21 +875,21 @@ where
                     }
                 },
 
-                OpCode::OpSub => lifted_pop! {
+                Op::Sub => lifted_pop! {
                     self(b, a) => {
                         let result = arithmetic_op!(&a, &b, -).with_span(&frame, self)?;
                         self.stack.push(result);
                     }
                 },
 
-                OpCode::OpMul => lifted_pop! {
+                Op::Mul => lifted_pop! {
                     self(b, a) => {
                         let result = arithmetic_op!(&a, &b, *).with_span(&frame, self)?;
                         self.stack.push(result);
                     }
                 },
 
-                OpCode::OpDiv => lifted_pop! {
+                Op::Div => lifted_pop! {
                     self(b, a) => {
                         match b {
                             Value::Integer(0) => return frame.error(self, ErrorKind::DivisionByZero),
@@ -848,7 +904,7 @@ where
                     }
                 },
 
-                OpCode::OpNegate => match self.stack_pop() {
+                Op::Negate => match self.stack_pop() {
                     Value::Integer(i) => self.stack.push(Value::Integer(-i)),
                     Value::Float(f) => self.stack.push(Value::Float(-f)),
                     Value::Catchable(cex) => self.stack.push(Value::Catchable(cex)),
@@ -863,12 +919,12 @@ where
                     }
                 },
 
-                OpCode::OpLess => cmp_op!(self, frame, span, <),
-                OpCode::OpLessOrEq => cmp_op!(self, frame, span, <=),
-                OpCode::OpMore => cmp_op!(self, frame, span, >),
-                OpCode::OpMoreOrEq => cmp_op!(self, frame, span, >=),
+                Op::Less => cmp_op!(self, frame, span, <),
+                Op::LessOrEq => cmp_op!(self, frame, span, <=),
+                Op::More => cmp_op!(self, frame, span, >),
+                Op::MoreOrEq => cmp_op!(self, frame, span, >=),
 
-                OpCode::OpFindFile => match self.stack_pop() {
+                Op::FindFile => match self.stack_pop() {
                     Value::UnresolvedPath(path) => {
                         let resolved = self
                             .nix_search_path
@@ -880,7 +936,7 @@ where
                     _ => panic!("tvix compiler bug: OpFindFile called on non-UnresolvedPath"),
                 },
 
-                OpCode::OpResolveHomePath => match self.stack_pop() {
+                Op::ResolveHomePath => match self.stack_pop() {
                     Value::UnresolvedPath(path) => {
                         match dirs::home_dir() {
                             None => {
@@ -903,24 +959,23 @@ where
                     }
                 },
 
-                OpCode::OpPushWith(StackIdx(idx)) => self.with_stack.push(frame.stack_offset + idx),
+                Op::PushWith => self
+                    .with_stack
+                    .push(frame.stack_offset + frame.read_uvarint() as usize),
 
-                OpCode::OpPopWith => {
+                Op::PopWith => {
                     self.with_stack.pop();
                 }
 
-                OpCode::OpAssertFail => {
+                Op::AssertFail => {
                     self.stack
                         .push(Value::from(CatchableErrorKind::AssertionFailed));
                 }
 
-                // Data-carrying operands should never be executed,
-                // that is a critical error in the VM/compiler.
-                OpCode::DataStackIdx(_)
-                | OpCode::DataDeferredLocal(_)
-                | OpCode::DataUpvalueIdx(_)
-                | OpCode::DataCaptureWith => {
-                    panic!("Tvix bug: attempted to execute data-carrying operand")
+                // Encountering an invalid opcode is a critical error in the
+                // VM/compiler.
+                Op::Invalid => {
+                    panic!("Tvix bug: attempted to execute invalid opcode")
                 }
             }
         }
@@ -940,7 +995,7 @@ where
         &self.stack[self.stack.len() - 1 - offset]
     }
 
-    fn run_attrset(&mut self, frame: &CallFrame, count: usize) -> EvalResult<()> {
+    fn run_attrset(&mut self, count: usize, frame: &CallFrame) -> EvalResult<()> {
         let attrs = NixAttrs::construct(count, self.stack.split_off(self.stack.len() - count * 2))
             .with_span(frame, self)?
             .map(Value::attrs)
@@ -978,7 +1033,7 @@ where
     /// Interpolate string fragments by popping the specified number of
     /// fragments of the stack, evaluating them to strings, and pushing
     /// the concatenated result string back on the stack.
-    fn run_interpolate(&mut self, frame: &CallFrame, count: usize) -> EvalResult<()> {
+    fn run_interpolate(&mut self, count: u64, frame: &CallFrame) -> EvalResult<()> {
         let mut out = BString::default();
         // Interpolation propagates the context and union them.
         let mut context: NixContext = NixContext::new();
@@ -994,8 +1049,8 @@ where
             }
             let mut nix_string = val.to_contextful_str().with_span(frame, self)?;
             out.push_str(nix_string.as_bstr());
-            if let Some(nix_string_ctx) = nix_string.context_mut() {
-                context = context.join(nix_string_ctx);
+            if let Some(nix_string_ctx) = nix_string.take_context() {
+                context.extend(nix_string_ctx.into_iter())
             }
         }
 
@@ -1004,12 +1059,6 @@ where
         Ok(())
     }
 
-    /// Returns a reasonable light span for the current situation that the VM is
-    /// in.
-    pub fn reasonable_light_span(&self) -> LightSpan {
-        self.reasonable_span.clone()
-    }
-
     /// Apply an argument from the stack to a builtin, and attempt to call it.
     ///
     /// All calls are tail-calls in Tvix, as every function application is a
@@ -1017,7 +1066,7 @@ where
     ///
     /// Due to this, once control flow exits this function, the generator will
     /// automatically be run by the VM.
-    fn call_builtin(&mut self, span: LightSpan, mut builtin: Builtin) -> EvalResult<()> {
+    fn call_builtin(&mut self, span: Span, mut builtin: Builtin) -> EvalResult<()> {
         let builtin_name = builtin.name();
         self.observer.observe_enter_builtin(builtin_name);
 
@@ -1041,8 +1090,8 @@ where
 
     fn call_value(
         &mut self,
-        span: LightSpan,
-        parent: Option<(LightSpan, CallFrame)>,
+        span: Span,
+        parent: Option<(Span, CallFrame)>,
         callable: Value,
     ) -> EvalResult<()> {
         match callable {
@@ -1098,69 +1147,79 @@ where
                 Ok(())
             }
 
-            v => Err(ErrorKind::NotCallable(v.type_of())).with_span(&span, self),
+            v => Err(ErrorKind::NotCallable(v.type_of())).with_span(span, self),
         }
     }
 
     /// Populate the upvalue fields of a thunk or closure under construction.
+    ///
+    /// See the closely tied function `emit_upvalue_data` in the compiler
+    /// implementation for details on the argument processing.
     fn populate_upvalues(
         &mut self,
         frame: &mut CallFrame,
-        count: usize,
+        count: u64,
         mut upvalues: impl DerefMut<Target = Upvalues>,
     ) -> EvalResult<()> {
-        for _ in 0..count {
-            match frame.inc_ip() {
-                OpCode::DataStackIdx(StackIdx(stack_idx)) => {
-                    let idx = frame.stack_offset + stack_idx;
-
-                    let val = match self.stack.get(idx) {
-                        Some(val) => val.clone(),
-                        None => {
-                            return frame.error(
-                                self,
-                                ErrorKind::TvixBug {
-                                    msg: "upvalue to be captured was missing on stack",
-                                    metadata: Some(Rc::new(json!({
-                                        "ip": format!("{:#x}", frame.ip.0 - 1),
-                                        "stack_idx(relative)": stack_idx,
-                                        "stack_idx(absolute)": idx,
-                                    }))),
-                                },
-                            );
-                        }
-                    };
+        // Bit 0 of the operand flags whether the with-stack is captured;
+        // shifting it away leaves the actual upvalue count.
+        let capture_with = count & 0b1 == 1;
+        let count = count >> 1;
+        if capture_with {
+            // Start the captured with_stack off of the
+            // current call frame's captured with_stack, ...
+            let mut captured_with_stack = frame
+                .upvalues
+                .with_stack()
+                .cloned()
+                // ... or make an empty one if there isn't one already.
+                .unwrap_or_else(|| Vec::with_capacity(self.with_stack.len()));
+
+            for idx in &self.with_stack {
+                captured_with_stack.push(self.stack[*idx].clone());
+            }
 
-                    upvalues.deref_mut().push(val);
-                }
+            upvalues.deref_mut().set_with_stack(captured_with_stack);
+        }
 
-                OpCode::DataUpvalueIdx(upv_idx) => {
-                    upvalues.deref_mut().push(frame.upvalue(upv_idx).clone());
-                }
+        for _ in 0..count {
+            let pos = Position(frame.read_uvarint());
 
-                OpCode::DataDeferredLocal(idx) => {
-                    upvalues.deref_mut().push(Value::DeferredUpvalue(idx));
-                }
+            if let Some(stack_idx) = pos.runtime_stack_index() {
+                let idx = frame.stack_offset + stack_idx.0;
 
-                OpCode::DataCaptureWith => {
-                    // Start the captured with_stack off of the
-                    // current call frame's captured with_stack, ...
-                    let mut captured_with_stack = frame
-                        .upvalues
-                        .with_stack()
-                        .cloned()
-                        // ... or make an empty one if there isn't one already.
-                        .unwrap_or_else(|| Vec::with_capacity(self.with_stack.len()));
-
-                    for idx in &self.with_stack {
-                        captured_with_stack.push(self.stack[*idx].clone());
+                let val = match self.stack.get(idx) {
+                    Some(val) => val.clone(),
+                    None => {
+                        return frame.error(
+                            self,
+                            ErrorKind::TvixBug {
+                                msg: "upvalue to be captured was missing on stack",
+                                metadata: Some(Rc::new(json!({
+                                    "ip": format!("{:#x}", frame.ip.0 - 1),
+                                    "stack_idx(relative)": stack_idx.0,
+                                    "stack_idx(absolute)": idx,
+                                }))),
+                            },
+                        );
                     }
+                };
 
-                    upvalues.deref_mut().set_with_stack(captured_with_stack);
-                }
+                upvalues.deref_mut().push(val);
+                continue;
+            }
 
-                _ => panic!("compiler error: missing closure operand"),
+            if let Some(idx) = pos.runtime_deferred_local() {
+                upvalues.deref_mut().push(Value::DeferredUpvalue(idx));
+                continue;
             }
+
+            if let Some(idx) = pos.runtime_upvalue_index() {
+                upvalues.deref_mut().push(frame.upvalue(idx).clone());
+                continue;
+            }
+
+            panic!("Tvix bug: invalid capture position emitted")
         }
 
         Ok(())
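`populate_upvalues` decodes a small bit-packing scheme: the compiler folds the capture-with flag into bit 0 of the upvalue-count operand, so a single varint carries both facts. A worked round-trip of that encoding, written out for illustration:

```rust
/// Pack an upvalue count and the capture-with flag into one operand,
/// mirroring what populate_upvalues unpacks above. Sketch only.
fn encode_upvalue_operand(count: u64, capture_with: bool) -> u64 {
    (count << 1) | u64::from(capture_with)
}

fn decode_upvalue_operand(operand: u64) -> (u64, bool) {
    (operand >> 1, operand & 0b1 == 1)
}

fn main() {
    // Three upvalues plus a captured with-stack encode as 0b111.
    let op = encode_upvalue_operand(3, true);
    assert_eq!(op, 0b111);
    assert_eq!(decode_upvalue_operand(op), (3, true));
}
```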
@@ -1345,17 +1404,17 @@ where
         observer,
         source,
         globals,
-        root_span.into(),
+        root_span,
     );
 
     // When evaluating strictly, synthesise a frame that will instruct
     // the VM to deep-force the final value before returning it.
     if strict {
-        vm.enqueue_generator("final_deep_force", root_span.into(), final_deep_force);
+        vm.enqueue_generator("final_deep_force", root_span, final_deep_force);
     }
 
     vm.frames.push(Frame::CallFrame {
-        span: root_span.into(),
+        span: root_span,
         call_frame: CallFrame {
             lambda,
             upvalues: Rc::new(Upvalues::with_capacity(0)),
diff --git a/tvix/eval/tests/nix_oracle.rs b/tvix/eval/tests/nix_oracle.rs
index 5a5cc0a8223f..cc2a7b519e8f 100644
--- a/tvix/eval/tests/nix_oracle.rs
+++ b/tvix/eval/tests/nix_oracle.rs
@@ -56,11 +56,18 @@ fn nix_eval(expr: &str, strictness: Strictness) -> String {
 /// `NIX_INSTANTIATE_BINARY_PATH` env var to resolve the `nix-instantiate` binary) and tvix, and
 /// assert that the result is identical
 #[track_caller]
+#[cfg(feature = "impure")]
 fn compare_eval(expr: &str, strictness: Strictness) {
+    use tvix_eval::EvalIO;
+
     let nix_result = nix_eval(expr, strictness);
-    let mut eval = tvix_eval::Evaluation::new_pure();
-    eval.strict = matches!(strictness, Strictness::Strict);
-    eval.io_handle = Box::new(tvix_eval::StdIO);
+    let mut eval_builder = tvix_eval::Evaluation::builder_pure();
+    if matches!(strictness, Strictness::Strict) {
+        eval_builder = eval_builder.strict();
+    }
+    let eval = eval_builder
+        .io_handle(Box::new(tvix_eval::StdIO) as Box<dyn EvalIO>)
+        .build();
 
     let tvix_result = eval
         .evaluate(expr, None)
@@ -76,6 +83,7 @@ fn compare_eval(expr: &str, strictness: Strictness) {
 macro_rules! compare_eval_tests {
     ($strictness:expr, {}) => {};
     ($strictness:expr, {$(#[$meta:meta])* $test_name: ident($expr: expr); $($rest:tt)*}) => {
+        #[cfg(feature = "impure")]
         #[test]
         $(#[$meta])*
         fn $test_name() {
diff --git a/tvix/glue/Cargo.toml b/tvix/glue/Cargo.toml
index 0afdefeaaa0e..bb522dceb902 100644
--- a/tvix/glue/Cargo.toml
+++ b/tvix/glue/Cargo.toml
@@ -4,43 +4,46 @@ version = "0.1.0"
 edition = "2021"
 
 [dependencies]
-async-compression = { version = "0.4.9", features = ["tokio", "gzip", "bzip2", "xz"]}
-bstr = "1.6.0"
-bytes = "1.4.0"
-data-encoding = "2.3.3"
-futures = "0.3.30"
-magic = "0.16.2"
+async-compression = { workspace = true, features = ["tokio", "gzip", "bzip2", "xz"] }
+bstr = { workspace = true }
+bytes = { workspace = true }
+data-encoding = { workspace = true }
+futures = { workspace = true }
+magic = { workspace = true }
 nix-compat = { path = "../nix-compat" }
-pin-project = "1.1"
-reqwest = { version = "0.11.22", features = ["rustls-tls-native-roots"], default-features = false }
+pin-project = { workspace = true }
+reqwest = { workspace = true, features = ["rustls-tls-native-roots"] }
 tvix-build = { path = "../build", default-features = false, features = []}
 tvix-eval = { path = "../eval" }
 tvix-castore = { path = "../castore" }
 tvix-store = { path = "../store", default-features = false, features = []}
-tracing = "0.1.37"
-tokio = "1.28.0"
-tokio-tar = "0.3.1"
-tokio-util = { version = "0.7.9", features = ["io", "io-util", "compat"] }
-thiserror = "1.0.38"
-serde = "1.0.195"
-serde_json = "1.0"
-sha2 = "0.10.8"
-sha1 = "0.10.6"
-md-5 = "0.10.6"
-url = "2.4.0"
-walkdir = "2.4.0"
-
-[dependencies.wu-manber]
-git = "https://github.com/tvlfyi/wu-manber.git"
+tvix-tracing = { path = "../tracing" }
+tracing = { workspace = true }
+tracing-indicatif = { workspace = true }
+tokio = { workspace = true }
+tokio-tar = { workspace = true }
+tokio-util = { workspace = true, features = ["io", "io-util", "compat"] }
+thiserror = { workspace = true }
+serde = { workspace = true }
+serde_json = { workspace = true }
+sha2 = { workspace = true }
+sha1 = { workspace = true }
+md-5 = { workspace = true }
+url = { workspace = true }
+walkdir = { workspace = true }
+clap = { workspace = true }
+wu-manber = { workspace = true }
 
 [dev-dependencies]
-criterion = { version = "0.5", features = ["html_reports"] }
-hex-literal = "0.4.1"
-lazy_static = "1.4.0"
-nix = { version = "0.27.1", features = [ "fs" ] }
-pretty_assertions = "1.4.0"
-rstest = "0.19.0"
-tempfile = "3.8.1"
+criterion = { workspace = true, features = ["html_reports"] }
+hex-literal = { workspace = true }
+lazy_static = { workspace = true }
+mimalloc = { workspace = true }
+nix = { workspace = true, features = ["fs"] }
+pretty_assertions = { workspace = true }
+rstest = { workspace = true }
+tempfile = { workspace = true }
+tokio-test = { workspace = true }
 
 [features]
 default = ["nix_tests"]
diff --git a/tvix/glue/benches/eval.rs b/tvix/glue/benches/eval.rs
index 202278c1aa01..a0823d2e129b 100644
--- a/tvix/glue/benches/eval.rs
+++ b/tvix/glue/benches/eval.rs
@@ -1,5 +1,7 @@
+use clap::Parser;
 use criterion::{black_box, criterion_group, criterion_main, Criterion};
 use lazy_static::lazy_static;
+use mimalloc::MiMalloc;
 use std::{env, rc::Rc, sync::Arc, time::Duration};
 use tvix_build::buildservice::DummyBuildService;
 use tvix_eval::{builtins::impure_builtins, EvalIO};
@@ -9,7 +11,10 @@ use tvix_glue::{
     tvix_io::TvixIO,
     tvix_store_io::TvixStoreIO,
 };
-use tvix_store::utils::construct_services;
+use tvix_store::utils::{construct_services, ServiceUrlsMemory};
+
+#[global_allocator]
+static GLOBAL: MiMalloc = MiMalloc;
 
 lazy_static! {
     static ref TOKIO_RUNTIME: tokio::runtime::Runtime = tokio::runtime::Runtime::new().unwrap();
@@ -21,38 +26,41 @@ fn interpret(code: &str) {
     // piece of code. b/262
     let (blob_service, directory_service, path_info_service, nar_calculation_service) =
         TOKIO_RUNTIME
-            .block_on(async { construct_services("memory://", "memory://", "memory://").await })
+            .block_on(async {
+                construct_services(ServiceUrlsMemory::parse_from(std::iter::empty::<&str>())).await
+            })
             .unwrap();
 
     // We assemble a complete store in memory.
     let tvix_store_io = Rc::new(TvixStoreIO::new(
         blob_service,
         directory_service,
-        path_info_service.into(),
+        path_info_service,
         nar_calculation_service.into(),
         Arc::<DummyBuildService>::default(),
         TOKIO_RUNTIME.handle().clone(),
     ));
 
-    let mut eval = tvix_eval::Evaluation::new(
-        Box::new(TvixIO::new(tvix_store_io.clone() as Rc<dyn EvalIO>)) as Box<dyn EvalIO>,
-        true,
-    );
+    let mut eval_builder = tvix_eval::Evaluation::builder(Box::new(TvixIO::new(
+        tvix_store_io.clone() as Rc<dyn EvalIO>,
+    )) as Box<dyn EvalIO>)
+    .enable_import()
+    .add_builtins(impure_builtins());
 
-    eval.builtins.extend(impure_builtins());
-    add_derivation_builtins(&mut eval, Rc::clone(&tvix_store_io));
-    add_fetcher_builtins(&mut eval, Rc::clone(&tvix_store_io));
-    add_import_builtins(&mut eval, tvix_store_io);
-    configure_nix_path(
-        &mut eval,
+    eval_builder = add_derivation_builtins(eval_builder, Rc::clone(&tvix_store_io));
+    eval_builder = add_fetcher_builtins(eval_builder, Rc::clone(&tvix_store_io));
+    eval_builder = add_import_builtins(eval_builder, tvix_store_io);
+    eval_builder = configure_nix_path(
+        eval_builder,
         // The benchmark requires TVIX_BENCH_NIX_PATH to be set, so barf out
         // early, rather than benchmarking tvix returning an error.
         &Some(env::var("TVIX_BENCH_NIX_PATH").expect("TVIX_BENCH_NIX_PATH must be set")),
     );
 
+    let eval = eval_builder.build();
     let result = eval.evaluate(code, None);
 
-    assert!(result.errors.is_empty());
+    assert!(result.errors.is_empty(), "{:#?}", result.errors);
 }
 
 fn eval_nixpkgs(c: &mut Criterion) {
@@ -61,6 +69,12 @@ fn eval_nixpkgs(c: &mut Criterion) {
             interpret(black_box("(import <nixpkgs> {}).hello.outPath"));
         })
     });
+
+    c.bench_function("firefox outpath", |b| {
+        b.iter(|| {
+            interpret(black_box("(import <nixpkgs> {}).firefox.outPath"));
+        })
+    });
 }
 
 criterion_group!(
diff --git a/tvix/glue/build.rs b/tvix/glue/build.rs
new file mode 100644
index 000000000000..544c34a6cbcb
--- /dev/null
+++ b/tvix/glue/build.rs
@@ -0,0 +1,6 @@
+fn main() {
+    // Pick up new test case files
+    // https://github.com/la10736/rstest/issues/256
+    println!("cargo:rerun-if-changed=src/tests/nix_tests");
+    println!("cargo:rerun-if-changed=src/tests/tvix_tests")
+}
diff --git a/tvix/glue/default.nix b/tvix/glue/default.nix
index 08f5c2228d76..0ead94a504c3 100644
--- a/tvix/glue/default.nix
+++ b/tvix/glue/default.nix
@@ -1,8 +1,17 @@
-{ depot, pkgs, ... }:
+{ depot, pkgs, lib, ... }:
 
 (depot.tvix.crates.workspaceMembers.tvix-glue.build.override {
   runTests = true;
   testPreRun = ''
-    export SSL_CERT_FILE=${pkgs.cacert}/etc/ssl/certs/ca-bundle.crt;
+    export SSL_CERT_FILE=/dev/null
   '';
+}).overrideAttrs (old: rec {
+  meta.ci.targets = lib.filter (x: lib.hasPrefix "with-features" x || x == "no-features") (lib.attrNames passthru);
+  passthru = old.passthru // (depot.tvix.utils.mkFeaturePowerset {
+    inherit (old) crateName;
+    features = [ "nix_tests" ];
+    override.testPreRun = ''
+      export SSL_CERT_FILE=/dev/null
+    '';
+  });
 })
diff --git a/tvix/glue/src/builtins/derivation.rs b/tvix/glue/src/builtins/derivation.rs
index a7742ae40a66..5bc9dab71283 100644
--- a/tvix/glue/src/builtins/derivation.rs
+++ b/tvix/glue/src/builtins/derivation.rs
@@ -168,14 +168,21 @@ fn handle_fixed_output(
 #[builtins(state = "Rc<TvixStoreIO>")]
 pub(crate) mod derivation_builtins {
     use std::collections::BTreeMap;
+    use std::io::Cursor;
 
     use crate::builtins::utils::{select_string, strong_importing_coerce_to_string};
+    use crate::fetchurl::fetchurl_derivation_to_fetch;
 
     use super::*;
     use bstr::ByteSlice;
-    use nix_compat::store_path::hash_placeholder;
+    use md5::Digest;
+    use nix_compat::nixhash::CAHash;
+    use nix_compat::store_path::{build_ca_path, hash_placeholder};
+    use sha2::Sha256;
+    use tvix_castore::Node;
     use tvix_eval::generators::Gen;
     use tvix_eval::{NixContext, NixContextElement, NixString};
+    use tvix_store::proto::{NarInfo, PathInfo};
 
     #[builtin("placeholder")]
     async fn builtin_placeholder(co: GenCo, input: Value) -> Result<Value, ErrorKind> {
@@ -338,9 +345,9 @@ pub(crate) mod derivation_builtins {
                             input_context.mimic(&val_str);
 
                             if arg_name == "builder" {
-                                drv.builder = val_str.to_str()?.to_owned();
+                                val_str.to_str()?.clone_into(&mut drv.builder);
                             } else {
-                                drv.system = val_str.to_str()?.to_owned();
+                                val_str.to_str()?.clone_into(&mut drv.system);
                             }
 
                             // Either populate drv.environment or structured_attrs.
@@ -372,12 +379,12 @@ pub(crate) mod derivation_builtins {
                             return Ok(val);
                         }
 
-                        let (val_json, mut context) = match val.into_contextful_json(&co).await? {
+                        let (val_json, context) = match val.into_contextful_json(&co).await? {
                             Ok(v) => v,
                             Err(cek) => return Ok(Value::from(cek)),
                         };
 
-                        input_context = input_context.join(&mut context);
+                        input_context.extend(context.into_iter());
 
                         // No need to check for dups, we only iterate over every attribute name once
                         structured_attrs.insert(arg_name.to_owned(), val_json);
@@ -506,6 +513,17 @@ pub(crate) mod derivation_builtins {
                 ))),
         )));
 
+        // If the derivation is a fake derivation (builder "builtin:fetchurl"),
+        // synthesize a [Fetch] and add it there, too.
+        if drv.builder == "builtin:fetchurl" {
+            let (name, fetch) =
+                fetchurl_derivation_to_fetch(&drv).map_err(|e| ErrorKind::TvixError(Rc::new(e)))?;
+
+            known_paths
+                .add_fetch(fetch, &name)
+                .map_err(|e| ErrorKind::TvixError(Rc::new(e)))?;
+        }
+
         // Register the Derivation in known_paths.
         known_paths.add_derivation(drv_path, drv);
 
@@ -513,7 +531,12 @@ pub(crate) mod derivation_builtins {
     }
 
     #[builtin("toFile")]
-    async fn builtin_to_file(co: GenCo, name: Value, content: Value) -> Result<Value, ErrorKind> {
+    async fn builtin_to_file(
+        state: Rc<TvixStoreIO>,
+        co: GenCo,
+        name: Value,
+        content: Value,
+    ) -> Result<Value, ErrorKind> {
         if name.is_catchable() {
             return Ok(name);
         }
@@ -529,24 +552,84 @@ pub(crate) mod derivation_builtins {
             .to_contextful_str()
             .context("evaluating the `content` parameter of builtins.toFile")?;
 
-        if content.iter_derivation().count() > 0 || content.iter_single_outputs().count() > 0 {
+        if content.iter_ctx_derivation().count() > 0
+            || content.iter_ctx_single_outputs().count() > 0
+        {
             return Err(ErrorKind::UnexpectedContext);
         }
 
-        let path =
-            nix_compat::store_path::build_text_path(name.to_str()?, &content, content.iter_plain())
-                .map_err(|_e| {
-                    nix_compat::derivation::DerivationError::InvalidOutputName(
-                        name.to_str_lossy().into_owned(),
-                    )
+        let store_path = state.tokio_handle.block_on(async {
+            // upload contents to the blobservice and create a root node
+            let mut blob_writer = state.blob_service.open_write().await;
+
+            let mut r = Cursor::new(&content);
+
+            let blob_size = tokio::io::copy(&mut r, &mut blob_writer).await?;
+            let blob_digest = blob_writer.close().await?;
+            let ca_hash = CAHash::Text(Sha256::digest(&content).into());
+
+            let store_path: StorePathRef =
+                build_ca_path(name.to_str()?, &ca_hash, content.iter_ctx_plain(), false)
+                    .map_err(|_e| {
+                        nix_compat::derivation::DerivationError::InvalidOutputName(
+                            name.to_str_lossy().into_owned(),
+                        )
+                    })
+                    .map_err(DerivationError::InvalidDerivation)?;
+
+            let root_node = Node::File {
+                digest: blob_digest,
+                size: blob_size,
+                executable: false,
+            };
+
+            // calculate the nar hash
+            let (nar_size, nar_sha256) = state
+                .nar_calculation_service
+                .calculate_nar(&root_node)
+                .await
+                .map_err(|e| ErrorKind::TvixError(Rc::new(e)))?;
+
+            // assemble references from plain context.
+            let reference_paths: Vec<StorePathRef> = content
+                .iter_ctx_plain()
+                .map(|elem| StorePathRef::from_absolute_path(elem.as_bytes()))
+                .collect::<Result<_, _>>()
+                .map_err(|e| ErrorKind::TvixError(Rc::new(e)))?;
+
+            // persist via pathinfo service.
+            state
+                .path_info_service
+                .put(PathInfo {
+                    node: Some(tvix_castore::proto::Node::from_name_and_node(
+                        store_path.to_string().into(),
+                        root_node,
+                    )),
+                    references: reference_paths
+                        .iter()
+                        .map(|x| bytes::Bytes::copy_from_slice(x.digest()))
+                        .collect(),
+                    narinfo: Some(NarInfo {
+                        nar_size,
+                        nar_sha256: nar_sha256.to_vec().into(),
+                        signatures: vec![],
+                        reference_names: reference_paths
+                            .into_iter()
+                            .map(|x| x.to_string())
+                            .collect(),
+                        deriver: None,
+                        ca: Some(ca_hash.into()),
+                    }),
                 })
-                .map_err(DerivationError::InvalidDerivation)?
-                .to_absolute_path();
+                .await
+                .map_err(|e| ErrorKind::TvixError(Rc::new(e)))?;
 
-        let context: NixContext = NixContextElement::Plain(path.clone()).into();
+            Ok::<_, ErrorKind>(store_path)
+        })?;
 
-        // TODO: actually persist the file in the store at that path ...
+        let abs_path = store_path.to_absolute_path();
+        let context: NixContext = NixContextElement::Plain(abs_path.clone()).into();
 
-        Ok(Value::from(NixString::new_context_from(context, path)))
+        Ok(Value::from(NixString::new_context_from(context, abs_path)))
     }
 }
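`toFile` now actually persists its contents: the text is uploaded as a blob, a NAR hash is calculated for the `PathInfo`, and the store path itself is derived from a `CAHash::Text` over the raw contents plus the plain-context references. The path derivation in isolation, as a sketch assuming the `build_ca_path` signature used above (store upload elided):

```rust
use nix_compat::nixhash::CAHash;
use nix_compat::store_path::{build_ca_path, BuildStorePathError, StorePathRef};
use sha2::{Digest, Sha256};

/// Derive the store path builtins.toFile would assign to `content`,
/// given the absolute store paths it references. Sketch only.
fn text_store_path<'a>(
    name: &'a str,
    content: &[u8],
    references: &[String],
) -> Result<StorePathRef<'a>, BuildStorePathError> {
    // Text outputs are addressed by the sha256 of their raw contents.
    let ca_hash = CAHash::Text(Sha256::digest(content).into());
    build_ca_path(name, &ca_hash, references.iter().map(String::as_str), false)
}
```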
diff --git a/tvix/glue/src/builtins/errors.rs b/tvix/glue/src/builtins/errors.rs
index f6d5745c56e2..af8a24e6abb8 100644
--- a/tvix/glue/src/builtins/errors.rs
+++ b/tvix/glue/src/builtins/errors.rs
@@ -4,7 +4,7 @@ use nix_compat::{
     store_path::BuildStorePathError,
 };
 use reqwest::Url;
-use std::rc::Rc;
+use std::{path::PathBuf, rc::Rc};
 use thiserror::Error;
 use tvix_castore::import;
 
@@ -65,8 +65,12 @@ pub enum FetcherError {
 pub enum ImportError {
     #[error("non-file '{0}' cannot be imported in 'flat' mode")]
     FlatImportOfNonFile(String),
+
     #[error("hash mismatch at ingestion of '{0}', expected: '{1}', got: '{2}'")]
     HashMismatch(String, NixHash, NixHash),
+
+    #[error("path '{}' is not in the Nix store", .0.display())]
+    PathNotInStore(PathBuf),
 }
 
 impl From<ImportError> for tvix_eval::ErrorKind {
diff --git a/tvix/glue/src/builtins/fetchers.rs b/tvix/glue/src/builtins/fetchers.rs
index c7602c03e81f..1ad43b383353 100644
--- a/tvix/glue/src/builtins/fetchers.rs
+++ b/tvix/glue/src/builtins/fetchers.rs
@@ -6,16 +6,17 @@ use crate::{
     tvix_store_io::TvixStoreIO,
 };
 use nix_compat::nixhash;
-use nix_compat::nixhash::NixHash;
 use std::rc::Rc;
-use tracing::info;
 use tvix_eval::builtin_macros::builtins;
 use tvix_eval::generators::Gen;
 use tvix_eval::generators::GenCo;
 use tvix_eval::{CatchableErrorKind, ErrorKind, Value};
+use url::Url;
 
+// Used as a return type for extract_fetch_args, which shares some
+// parsing code between the fetchurl and fetchTarball builtins.
 struct NixFetchArgs {
-    url_str: String,
+    url: Url,
     name: Option<String>,
     sha256: Option<[u8; 32]>,
 }
@@ -30,8 +31,12 @@ async fn extract_fetch_args(
         // Get the raw bytes, not the ToString repr.
         let url_str =
             String::from_utf8(url_str.as_bytes().to_vec()).map_err(|_| ErrorKind::Utf8)?;
+
+        // Parse the URL.
+        let url = Url::parse(&url_str).map_err(|e| ErrorKind::TvixError(Rc::new(e)))?;
+
         return Ok(Ok(NixFetchArgs {
-            url_str,
+            url,
             name: None,
             sha256: None,
         }));
@@ -55,7 +60,14 @@ async fn extract_fetch_args(
         Err(cek) => return Ok(Err(cek)),
     };
 
-    // TODO: disallow other attrset keys, to match Nix' behaviour.
+    // Disallow other attrset keys, to match Nix' behaviour.
+    // We complain about the first unexpected key we find in the list.
+    const VALID_KEYS: [&[u8]; 3] = [b"url", b"name", b"sha256"];
+    if let Some(first_invalid_key) = attrs.keys().find(|k| !&VALID_KEYS.contains(&k.as_bytes())) {
+        return Err(ErrorKind::UnexpectedArgumentBuiltin(
+            first_invalid_key.clone(),
+        ));
+    }
 
     // parse the sha256 string into a digest.
     let sha256 = match sha256_str {
@@ -69,18 +81,16 @@ async fn extract_fetch_args(
         None => None,
     };
 
-    Ok(Ok(NixFetchArgs {
-        url_str,
-        name,
-        sha256,
-    }))
+    // Parse the URL.
+    let url = Url::parse(&url_str).map_err(|e| ErrorKind::TvixError(Rc::new(e)))?;
+
+    Ok(Ok(NixFetchArgs { url, name, sha256 }))
 }
 
 #[allow(unused_variables)] // for the `state` arg, for now
 #[builtins(state = "Rc<TvixStoreIO>")]
 pub(crate) mod fetcher_builtins {
-    use crate::builtins::FetcherError;
-    use url::Url;
+    use nix_compat::nixhash::NixHash;
 
     use super::*;
 
@@ -112,8 +122,6 @@ pub(crate) mod fetcher_builtins {
             }
             None => {
                 // If we don't have enough info, do the fetch now.
-                info!(?fetch, "triggering required fetch");
-
                 let (store_path, _root_node) = state
                     .tokio_handle
                     .block_on(async { state.fetcher.ingest_and_persist(&name, fetch).await })
@@ -138,16 +146,15 @@ pub(crate) mod fetcher_builtins {
         // Derive the name from the URL basename if not set explicitly.
         let name = args
             .name
-            .unwrap_or_else(|| url_basename(&args.url_str).to_owned());
-
-        // Parse the URL.
-        let url = Url::parse(&args.url_str)
-            .map_err(|e| ErrorKind::TvixError(Rc::new(FetcherError::InvalidUrl(e))))?;
+            .unwrap_or_else(|| url_basename(&args.url).to_owned());
 
         fetch_lazy(
             state,
             name,
-            Fetch::URL(url, args.sha256.map(NixHash::Sha256)),
+            Fetch::URL {
+                url: args.url,
+                exp_hash: args.sha256.map(NixHash::Sha256),
+            },
         )
     }
 
@@ -168,11 +175,14 @@ pub(crate) mod fetcher_builtins {
             .name
             .unwrap_or_else(|| DEFAULT_NAME_FETCH_TARBALL.to_owned());
 
-        // Parse the URL.
-        let url = Url::parse(&args.url_str)
-            .map_err(|e| ErrorKind::TvixError(Rc::new(FetcherError::InvalidUrl(e))))?;
-
-        fetch_lazy(state, name, Fetch::Tarball(url, args.sha256))
+        fetch_lazy(
+            state,
+            name,
+            Fetch::Tarball {
+                url: args.url,
+                exp_nar_sha256: args.sha256,
+            },
+        )
     }
 
     #[builtin("fetchGit")]
diff --git a/tvix/glue/src/builtins/import.rs b/tvix/glue/src/builtins/import.rs
index 4a15afa8142d..34ae2778ecdd 100644
--- a/tvix/glue/src/builtins/import.rs
+++ b/tvix/glue/src/builtins/import.rs
@@ -3,6 +3,7 @@
 use crate::builtins::errors::ImportError;
 use std::path::Path;
 use tvix_castore::import::ingest_entries;
+use tvix_castore::Node;
 use tvix_eval::{
     builtin_macros::builtins,
     generators::{self, GenCo},
@@ -16,7 +17,7 @@ async fn filtered_ingest(
     co: GenCo,
     path: &Path,
     filter: Option<&Value>,
-) -> Result<tvix_castore::proto::node::Node, ErrorKind> {
+) -> Result<Node, ErrorKind> {
     let mut entries: Vec<walkdir::DirEntry> = vec![];
     let mut it = walkdir::WalkDir::new(path)
         .follow_links(false)
@@ -104,18 +105,20 @@ async fn filtered_ingest(
 
 #[builtins(state = "Rc<TvixStoreIO>")]
 mod import_builtins {
+    use std::os::unix::ffi::OsStrExt;
     use std::rc::Rc;
 
     use super::*;
 
+    use crate::tvix_store_io::TvixStoreIO;
     use nix_compat::nixhash::{CAHash, NixHash};
+    use nix_compat::store_path::StorePathRef;
+    use sha2::Digest;
+    use tokio::io::AsyncWriteExt;
+    use tvix_eval::builtins::coerce_value_to_path;
     use tvix_eval::generators::Gen;
     use tvix_eval::{generators::GenCo, ErrorKind, Value};
-    use tvix_eval::{NixContextElement, NixString};
-
-    use tvix_castore::B3Digest;
-
-    use crate::tvix_store_io::TvixStoreIO;
+    use tvix_eval::{FileType, NixContextElement, NixString};
 
     #[builtin("path")]
     async fn builtin_path(
@@ -125,9 +128,13 @@ mod import_builtins {
     ) -> Result<Value, ErrorKind> {
         let args = args.to_attrs()?;
         let path = args.select_required("path")?;
-        let path = generators::request_force(&co, path.clone())
-            .await
-            .to_path()?;
+        let path =
+            match coerce_value_to_path(&co, generators::request_force(&co, path.clone()).await)
+                .await?
+            {
+                Ok(path) => path,
+                Err(cek) => return Ok(cek.into()),
+            };
         let name: String = if let Some(name) = args.select("name") {
             generators::request_force(&co, name.clone())
                 .await
@@ -165,71 +172,143 @@ mod import_builtins {
             })
             .transpose()?;
 
-        // FUTUREWORK(performance): this opens the file instead of using a stat-like
-        // system call to the file.
-        if !recursive_ingestion && state.open(path.as_ref()).is_err() {
-            Err(ImportError::FlatImportOfNonFile(
-                path.to_string_lossy().to_string(),
-            ))?;
-        }
+        // Check if the path points to a regular file.
+        // If it does, the filter function is never executed.
+        // TODO: follow symlinks and check their type instead
+        let (root_node, ca_hash) = match state.file_type(path.as_ref())? {
+            FileType::Regular => {
+                let mut file = state.open(path.as_ref())?;
+                // This is a single file, copy it to the blobservice directly.
+                let mut hash = sha2::Sha256::new();
+                let mut blob_size = 0;
+                let mut blob_writer = state
+                    .tokio_handle
+                    .block_on(async { state.blob_service.open_write().await });
+
+                let mut buf = [0u8; 4096];
+
+                loop {
+                    // read bytes into buffer, break out if EOF
+                    let len = file.read(&mut buf)?;
+                    if len == 0 {
+                        break;
+                    }
+                    blob_size += len as u64;
 
-        let root_node = filtered_ingest(state.clone(), co, path.as_ref(), filter).await?;
-        let ca: CAHash = if recursive_ingestion {
-            CAHash::Nar(NixHash::Sha256(state.tokio_handle.block_on(async {
-                Ok::<_, tvix_eval::ErrorKind>(
+                    let data = &buf[0..len];
+
+                    // add to blobwriter
                     state
-                        .nar_calculation_service
-                        .as_ref()
-                        .calculate_nar(&root_node)
-                        .await
-                        .map_err(|e| ErrorKind::TvixError(Rc::new(e)))?
-                        .1,
-                )
-            })?))
-        } else {
-            let digest: B3Digest = match root_node {
-                tvix_castore::proto::node::Node::File(ref fnode) => {
-                    // It's already validated.
-                    fnode.digest.clone().try_into().unwrap()
+                        .tokio_handle
+                        .block_on(async { blob_writer.write_all(data).await })?;
+
+                    // update the sha256 hash function. We can skip that if we're not using it.
+                    if !recursive_ingestion {
+                        hash.update(data);
+                    }
                 }
-                // We cannot hash anything else than file in flat import mode.
-                _ => {
+
+                // close the blob writer, get back the b3 digest.
+                let blob_digest = state
+                    .tokio_handle
+                    .block_on(async { blob_writer.close().await })?;
+
+                let root_node = Node::File {
+                    digest: blob_digest,
+                    size: blob_size,
+                    executable: false,
+                };
+
+                let ca_hash = if recursive_ingestion {
+                    let (_nar_size, nar_sha256) = state
+                        .tokio_handle
+                        .block_on(async {
+                            state
+                                .nar_calculation_service
+                                .as_ref()
+                                .calculate_nar(&root_node)
+                                .await
+                        })
+                        .map_err(|e| tvix_eval::ErrorKind::TvixError(Rc::new(e)))?;
+                    CAHash::Nar(NixHash::Sha256(nar_sha256))
+                } else {
+                    CAHash::Flat(NixHash::Sha256(hash.finalize().into()))
+                };
+
+                (root_node, ca_hash)
+            }
+
+            FileType::Directory => {
+                if !recursive_ingestion {
                     return Err(ImportError::FlatImportOfNonFile(
                         path.to_string_lossy().to_string(),
-                    )
-                    .into())
+                    ))?;
                 }
-            };
 
-            // FUTUREWORK: avoid hashing again.
-            CAHash::Flat(NixHash::Sha256(
-                state
+                // do the filtered ingest
+                let root_node = filtered_ingest(state.clone(), co, path.as_ref(), filter).await?;
+
+                // calculate the NAR sha256
+                let (_nar_size, nar_sha256) = state
                     .tokio_handle
-                    .block_on(async { state.blob_to_sha256_hash(digest).await })?,
-            ))
+                    .block_on(async {
+                        state
+                            .nar_calculation_service
+                            .as_ref()
+                            .calculate_nar(&root_node)
+                            .await
+                    })
+                    .map_err(|e| tvix_eval::ErrorKind::TvixError(Rc::new(e)))?;
+
+                let ca_hash = CAHash::Nar(NixHash::Sha256(nar_sha256));
+
+                (root_node, ca_hash)
+            }
+            FileType::Symlink => {
+                // FUTUREWORK: Nix follows a symlink if it's at the root,
+                // except if it's not resolvable (NixOS/nix#7761).
+                return Err(tvix_eval::ErrorKind::IO {
+                    path: Some(path.to_path_buf()),
+                    error: Rc::new(std::io::Error::new(
+                        std::io::ErrorKind::Unsupported,
+                        "builtins.path pointing to a symlink is ill-defined.",
+                    )),
+                });
+            }
+            FileType::Unknown => {
+                return Err(tvix_eval::ErrorKind::IO {
+                    path: Some(path.to_path_buf()),
+                    error: Rc::new(std::io::Error::new(
+                        std::io::ErrorKind::Unsupported,
+                        "unsupported file type",
+                    )),
+                })
+            }
         };
 
-        let obtained_hash = ca.hash().clone().into_owned();
         let (path_info, _hash, output_path) = state.tokio_handle.block_on(async {
             state
-                .node_to_path_info(name.as_ref(), path.as_ref(), ca, root_node)
+                .node_to_path_info(name.as_ref(), path.as_ref(), &ca_hash, root_node)
                 .await
         })?;
 
         if let Some(expected_sha256) = expected_sha256 {
-            if obtained_hash != expected_sha256 {
+            if *ca_hash.hash() != expected_sha256 {
                 Err(ImportError::HashMismatch(
                     path.to_string_lossy().to_string(),
                     expected_sha256,
-                    obtained_hash,
+                    ca_hash.hash().into_owned(),
                 ))?;
             }
         }
 
-        let _: tvix_store::proto::PathInfo = state.tokio_handle.block_on(async {
-            // This is necessary to cause the coercion of the error type.
-            Ok::<_, std::io::Error>(state.path_info_service.as_ref().put(path_info).await?)
-        })?;
+        state
+            .tokio_handle
+            .block_on(async { state.path_info_service.as_ref().put(path_info).await })
+            .map_err(|e| tvix_eval::ErrorKind::IO {
+                path: Some(path.to_path_buf()),
+                error: Rc::new(e.into()),
+            })?;
 
         // We need to attach context to the final output path.
         let outpath = output_path.to_absolute_path();
@@ -264,7 +343,7 @@ mod import_builtins {
                     .register_node_in_path_info_service(
                         name,
                         &p,
-                        CAHash::Nar(NixHash::Sha256(nar_sha256)),
+                        &CAHash::Nar(NixHash::Sha256(nar_sha256)),
                         root_node,
                     )
                     .await
@@ -280,6 +359,44 @@ mod import_builtins {
                 .into(),
         )
     }
+
+    #[builtin("storePath")]
+    async fn builtin_store_path(
+        state: Rc<TvixStoreIO>,
+        co: GenCo,
+        path: Value,
+    ) -> Result<Value, ErrorKind> {
+        let p = std::str::from_utf8(match &path {
+            Value::String(s) => s.as_bytes(),
+            Value::Path(p) => p.as_os_str().as_bytes(),
+            _ => {
+                return Err(ErrorKind::TypeError {
+                    expected: "string or path",
+                    actual: path.type_of(),
+                })
+            }
+        })?;
+
+        let path_exists =
+            if let Ok((store_path, sub_path)) = StorePathRef::from_absolute_path_full(p) {
+                if !sub_path.as_os_str().is_empty() {
+                    false
+                } else {
+                    state.store_path_exists(store_path.as_ref()).await?
+                }
+            } else {
+                false
+            };
+
+        if !path_exists {
+            return Err(ImportError::PathNotInStore(p.into()).into());
+        }
+
+        Ok(Value::String(NixString::new_context_from(
+            [NixContextElement::Plain(p.into())].into(),
+            p,
+        )))
+    }
 }
 
 pub use import_builtins::builtins as import_builtins;
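
The flat-import branch above streams the file through a 4 KiB buffer, feeding the blob writer and a SHA-256 hasher in a single pass. A minimal synchronous sketch of that chunked hash-while-copying loop, over any `std::io::Read` and without the blob service:

    use sha2::{Digest, Sha256};
    use std::io::Read;

    /// Hash a reader in 4 KiB chunks, returning the SHA-256 digest and total size.
    fn hash_reader<R: Read>(mut r: R) -> std::io::Result<([u8; 32], u64)> {
        let mut hasher = Sha256::new();
        let mut size = 0u64;
        let mut buf = [0u8; 4096];
        loop {
            // Read bytes into the buffer, break out on EOF.
            let len = r.read(&mut buf)?;
            if len == 0 {
                break;
            }
            size += len as u64;
            hasher.update(&buf[..len]);
        }
        Ok((hasher.finalize().into(), size))
    }
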
diff --git a/tvix/glue/src/builtins/mod.rs b/tvix/glue/src/builtins/mod.rs
index 3d6263286dc4..21685484867a 100644
--- a/tvix/glue/src/builtins/mod.rs
+++ b/tvix/glue/src/builtins/mod.rs
@@ -18,13 +18,14 @@ pub use errors::{DerivationError, FetcherError, ImportError};
 ///
 /// As they need to interact with `known_paths`, we also need to pass in
 /// `known_paths`.
-pub fn add_derivation_builtins<IO>(eval: &mut tvix_eval::Evaluation<IO>, io: Rc<TvixStoreIO>) {
-    eval.builtins
-        .extend(derivation::derivation_builtins::builtins(Rc::clone(&io)));
-
-    // Add the actual `builtins.derivation` from compiled Nix code
-    eval.src_builtins
-        .push(("derivation", include_str!("derivation.nix")));
+pub fn add_derivation_builtins<'co, 'ro, 'env, IO>(
+    eval_builder: tvix_eval::EvaluationBuilder<'co, 'ro, 'env, IO>,
+    io: Rc<TvixStoreIO>,
+) -> tvix_eval::EvaluationBuilder<'co, 'ro, 'env, IO> {
+    eval_builder
+        .add_builtins(derivation::derivation_builtins::builtins(Rc::clone(&io)))
+        // Add the actual `builtins.derivation` from compiled Nix code
+        .add_src_builtin("derivation", include_str!("derivation.nix"))
 }
 
 /// Adds fetcher builtins to the passed [tvix_eval::Evaluation]:
@@ -32,9 +33,11 @@ pub fn add_derivation_builtins<IO>(eval: &mut tvix_eval::Evaluation<IO>, io: Rc<
 /// * `fetchurl`
 /// * `fetchTarball`
 /// * `fetchGit`
-pub fn add_fetcher_builtins<IO>(eval: &mut tvix_eval::Evaluation<IO>, io: Rc<TvixStoreIO>) {
-    eval.builtins
-        .extend(fetchers::fetcher_builtins::builtins(Rc::clone(&io)));
+pub fn add_fetcher_builtins<'co, 'ro, 'env, IO>(
+    eval_builder: tvix_eval::EvaluationBuilder<'co, 'ro, 'env, IO>,
+    io: Rc<TvixStoreIO>,
+) -> tvix_eval::EvaluationBuilder<'co, 'ro, 'env, IO> {
+    eval_builder.add_builtins(fetchers::fetcher_builtins::builtins(Rc::clone(&io)))
 }
 
 /// Adds import-related builtins to the passed [tvix_eval::Evaluation].
@@ -42,10 +45,12 @@ pub fn add_fetcher_builtins<IO>(eval: &mut tvix_eval::Evaluation<IO>, io: Rc<Tvi
 /// These are `filterSource` and `path`
 ///
 /// As they need to interact with the store implementation, we pass [`TvixStoreIO`].
-pub fn add_import_builtins<IO>(eval: &mut tvix_eval::Evaluation<IO>, io: Rc<TvixStoreIO>) {
-    eval.builtins.extend(import::import_builtins(io));
-
+pub fn add_import_builtins<'co, 'ro, 'env, IO>(
+    eval_builder: tvix_eval::EvaluationBuilder<'co, 'ro, 'env, IO>,
+    io: Rc<TvixStoreIO>,
+) -> tvix_eval::EvaluationBuilder<'co, 'ro, 'env, IO> {
     // TODO(raitobezarius): evaluate expressing filterSource as Nix code using path (b/372)
+    eval_builder.add_builtins(import::import_builtins(io))
 }
 
 #[cfg(test)]
@@ -55,12 +60,13 @@ mod tests {
     use crate::tvix_store_io::TvixStoreIO;
 
     use super::{add_derivation_builtins, add_fetcher_builtins, add_import_builtins};
+    use clap::Parser;
     use nix_compat::store_path::hash_placeholder;
     use rstest::rstest;
     use tempfile::TempDir;
     use tvix_build::buildservice::DummyBuildService;
     use tvix_eval::{EvalIO, EvaluationResult};
-    use tvix_store::utils::construct_services;
+    use tvix_store::utils::{construct_services, ServiceUrlsMemory};
 
     /// evaluates a given nix expression and returns the result.
     /// Takes care of setting up the evaluator so it knows about the
@@ -69,23 +75,25 @@ mod tests {
         // We assemble a complete store in memory.
         let runtime = tokio::runtime::Runtime::new().expect("Failed to build a Tokio runtime");
         let (blob_service, directory_service, path_info_service, nar_calculation_service) = runtime
-            .block_on(async { construct_services("memory://", "memory://", "memory://").await })
+            .block_on(async {
+                construct_services(ServiceUrlsMemory::parse_from(std::iter::empty::<&str>())).await
+            })
             .expect("Failed to construct store services in memory");
 
         let io = Rc::new(TvixStoreIO::new(
             blob_service,
             directory_service,
-            path_info_service.into(),
+            path_info_service,
             nar_calculation_service.into(),
             Arc::<DummyBuildService>::default(),
             runtime.handle().clone(),
         ));
 
-        let mut eval = tvix_eval::Evaluation::new(io.clone() as Rc<dyn EvalIO>, false);
-
-        add_derivation_builtins(&mut eval, Rc::clone(&io));
-        add_fetcher_builtins(&mut eval, Rc::clone(&io));
-        add_import_builtins(&mut eval, io);
+        let mut eval_builder = tvix_eval::Evaluation::builder(io.clone() as Rc<dyn EvalIO>);
+        eval_builder = add_derivation_builtins(eval_builder, Rc::clone(&io));
+        eval_builder = add_fetcher_builtins(eval_builder, Rc::clone(&io));
+        eval_builder = add_import_builtins(eval_builder, io);
+        let eval = eval_builder.build();
 
         // run the evaluation itself.
         eval.evaluate(str, None)
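
Since each `add_*_builtins` helper now consumes and returns the `EvaluationBuilder`, the stepwise rebinding in the test above can equivalently be written as one nested expression; a sketch reusing the same `io` handle:

    // Thread the builder through each helper, then build once.
    let eval = add_import_builtins(
        add_fetcher_builtins(
            add_derivation_builtins(
                tvix_eval::Evaluation::builder(io.clone() as Rc<dyn EvalIO>),
                Rc::clone(&io),
            ),
            Rc::clone(&io),
        ),
        io,
    )
    .build();
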
diff --git a/tvix/glue/src/fetchers/decompression.rs b/tvix/glue/src/fetchers/decompression.rs
index f96fa60e34f7..69a8297e6aa8 100644
--- a/tvix/glue/src/fetchers/decompression.rs
+++ b/tvix/glue/src/fetchers/decompression.rs
@@ -1,5 +1,3 @@
-#![allow(dead_code)] // TODO
-
 use std::{
     io, mem,
     pin::Pin,
@@ -155,9 +153,7 @@ where
         };
 
         let mut our_buf = ReadBuf::new(buffer);
-        if let Err(e) = ready!(inner.as_pin_mut().unwrap().poll_read(cx, &mut our_buf)) {
-            return Poll::Ready(Err(e));
-        }
+        ready!(inner.as_pin_mut().unwrap().poll_read(cx, &mut our_buf))?;
 
         let data = our_buf.filled();
         if data.len() >= BYTES_NEEDED {
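
The `ready!(...)?` form works because `ready!` returns early with `Poll::Pending`, while `?` converts an `Err` into `Poll::Ready(Err(_))` (the `?` operator is defined for functions returning `Poll<Result<_, _>>`). A standalone sketch of the pattern in a hand-written `poll_read`; the `Passthrough` wrapper is hypothetical:

    use std::io;
    use std::pin::Pin;
    use std::task::{ready, Context, Poll};
    use tokio::io::{AsyncRead, ReadBuf};

    struct Passthrough<R>(R);

    impl<R: AsyncRead + Unpin> AsyncRead for Passthrough<R> {
        fn poll_read(
            mut self: Pin<&mut Self>,
            cx: &mut Context<'_>,
            buf: &mut ReadBuf<'_>,
        ) -> Poll<io::Result<()>> {
            // Pending short-circuits via ready!, Err propagates via `?`.
            ready!(Pin::new(&mut self.0).poll_read(cx, buf))?;
            Poll::Ready(Ok(()))
        }
    }
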
diff --git a/tvix/glue/src/fetchers/mod.rs b/tvix/glue/src/fetchers/mod.rs
index 1b2e1ee20cc0..065d011361a7 100644
--- a/tvix/glue/src/fetchers/mod.rs
+++ b/tvix/glue/src/fetchers/mod.rs
@@ -1,19 +1,16 @@
 use futures::TryStreamExt;
-use md5::Md5;
+use md5::{digest::DynDigest, Md5};
 use nix_compat::{
     nixhash::{CAHash, HashAlgo, NixHash},
     store_path::{build_ca_path, BuildStorePathError, StorePathRef},
 };
 use sha1::Sha1;
 use sha2::{digest::Output, Digest, Sha256, Sha512};
-use tokio::io::{AsyncBufRead, AsyncRead, AsyncWrite};
-use tokio_util::io::InspectReader;
-use tracing::warn;
-use tvix_castore::{
-    blobservice::BlobService,
-    directoryservice::DirectoryService,
-    proto::{node::Node, FileNode},
-};
+use tokio::io::{AsyncBufRead, AsyncRead, AsyncWrite, AsyncWriteExt, BufReader};
+use tokio_util::io::{InspectReader, InspectWriter};
+use tracing::{instrument, warn, Span};
+use tracing_indicatif::span_ext::IndicatifSpanExt;
+use tvix_castore::{blobservice::BlobService, directoryservice::DirectoryService, Node};
 use tvix_store::{nar::NarCalculationService, pathinfoservice::PathInfoService, proto::PathInfo};
 use url::Url;
 
@@ -25,10 +22,14 @@ use decompression::DecompressedReader;
 /// Representing options for doing a fetch.
 #[derive(Clone, Eq, PartialEq)]
 pub enum Fetch {
-    /// Fetch a literal file from the given URL, with an optional expected
-    /// NixHash of it.
-    /// TODO: check if this is *always* sha256, and if so, make it [u8; 32].
-    URL(Url, Option<NixHash>),
+    /// Fetch a literal file from the given URL,
+    /// with an optional expected hash.
+    URL {
+        /// The URL to fetch from.
+        url: Url,
+        /// The expected hash of the file.
+        exp_hash: Option<NixHash>,
+    },
 
     /// Fetch a tarball from the given URL and unpack.
     /// The file must be a tape archive (.tar), optionally compressed with gzip,
@@ -37,7 +38,37 @@ pub enum Fetch {
     /// so it is best if the tarball contains a single directory at top level.
     /// Optionally, a sha256 digest can be provided to verify the unpacked
     /// contents against.
-    Tarball(Url, Option<[u8; 32]>),
+    Tarball {
+        /// The URL to fetch from.
+        url: Url,
+        /// The expected hash of the contents, as NAR.
+        exp_nar_sha256: Option<[u8; 32]>,
+    },
+
+    /// Fetch a NAR file from the given URL and unpack.
+    /// The file can optionally be compressed.
+    NAR {
+        /// The URL to fetch from.
+        url: Url,
+        /// The expected hash of the NAR representation.
+        /// This unfortunately supports more than sha256.
+        hash: NixHash,
+    },
+
+    /// Fetches a file at a URL, makes it the store path root node,
+    /// and marks it executable.
+    /// Used by <nix/fetchurl.nix>, with `executable = true;`.
+    /// The expected hash is over the NAR representation, but need not be sha256:
+    /// ```nix
+    /// (import <nix/fetchurl.nix> { url = "https://cache.nixos.org/nar/0r8nqa1klm5v17ifc6z96m9wywxkjvgbnqq9pmy0sgqj53wj3n12.nar.xz"; hash = "sha1-NKNeU1csW5YJ4lCeWH3Z/apppNU="; executable = true; })
+    /// ```
+    Executable {
+        /// The URL to fetch from.
+        url: Url,
+        /// The expected hash of the NAR representation.
+        /// This unfortunately supports more than sha256.
+        hash: NixHash,
+    },
 
     /// TODO
     Git(),
@@ -60,7 +91,7 @@ fn redact_url(url: &Url) -> Url {
 impl std::fmt::Debug for Fetch {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         match self {
-            Fetch::URL(url, exp_hash) => {
+            Fetch::URL { url, exp_hash } => {
                 let url = redact_url(url);
                 if let Some(exp_hash) = exp_hash {
                     write!(f, "URL [url: {}, exp_hash: Some({})]", &url, exp_hash)
@@ -68,19 +99,30 @@ impl std::fmt::Debug for Fetch {
                     write!(f, "URL [url: {}, exp_hash: None]", &url)
                 }
             }
-            Fetch::Tarball(url, exp_digest) => {
+            Fetch::Tarball {
+                url,
+                exp_nar_sha256,
+            } => {
                 let url = redact_url(url);
-                if let Some(exp_digest) = exp_digest {
+                if let Some(exp_nar_sha256) = exp_nar_sha256 {
                     write!(
                         f,
-                        "Tarball [url: {}, exp_hash: Some({})]",
+                        "Tarball [url: {}, exp_nar_sha256: Some({})]",
                         url,
-                        NixHash::Sha256(*exp_digest)
+                        NixHash::Sha256(*exp_nar_sha256)
                     )
                 } else {
                     write!(f, "Tarball [url: {}, exp_hash: None]", url)
                 }
             }
+            Fetch::NAR { url, hash } => {
+                let url = redact_url(url);
+                write!(f, "NAR [url: {}, hash: {}]", &url, hash)
+            }
+            Fetch::Executable { url, hash } => {
+                let url = redact_url(url);
+                write!(f, "Executable [url: {}, hash: {}]", &url, hash)
+            }
             Fetch::Git() => todo!(),
         }
     }
@@ -95,9 +137,28 @@ impl Fetch {
         name: &'a str,
     ) -> Result<Option<StorePathRef<'a>>, BuildStorePathError> {
         let ca_hash = match self {
-            Fetch::URL(_, Some(nixhash)) => CAHash::Flat(nixhash.clone()),
-            Fetch::Tarball(_, Some(nar_sha256)) => CAHash::Nar(NixHash::Sha256(*nar_sha256)),
-            _ => return Ok(None),
+            Fetch::URL {
+                exp_hash: Some(exp_hash),
+                ..
+            } => CAHash::Flat(exp_hash.clone()),
+
+            Fetch::Tarball {
+                exp_nar_sha256: Some(exp_nar_sha256),
+                ..
+            } => CAHash::Nar(NixHash::Sha256(*exp_nar_sha256)),
+
+            Fetch::NAR { hash, .. } | Fetch::Executable { hash, .. } => {
+                CAHash::Nar(hash.to_owned())
+            }
+
+            Fetch::Git() => todo!(),
+
+            // everything else
+            Fetch::URL { exp_hash: None, .. }
+            | Fetch::Tarball {
+                exp_nar_sha256: None,
+                ..
+            } => return Ok(None),
         };
 
         // calculate the store path of this fetch
@@ -132,7 +193,18 @@ impl<BS, DS, PS, NS> Fetcher<BS, DS, PS, NS> {
 
     /// Constructs an HTTP request to the passed URL, and returns an AsyncBufRead to it.
     /// In case the URI uses the file:// scheme, use tokio::fs to open it.
-    async fn download(&self, url: Url) -> Result<Box<dyn AsyncBufRead + Unpin>, FetcherError> {
+    #[instrument(skip_all, fields(url, indicatif.pb_show=1), err)]
+    async fn download(
+        &self,
+        url: Url,
+    ) -> Result<Box<dyn AsyncBufRead + Unpin + Send>, FetcherError> {
+        let span = Span::current();
+        span.pb_set_message(&format!(
+            "๐Ÿ“กFetching {}",
+            // TODO: maybe shorten
+            redact_url(&url)
+        ));
+
         match url.scheme() {
             "file" => {
                 let f = tokio::fs::File::open(url.to_file_path().map_err(|_| {
@@ -144,16 +216,38 @@ impl<BS, DS, PS, NS> Fetcher<BS, DS, PS, NS> {
                     ))
                 })?)
                 .await?;
-                Ok(Box::new(tokio::io::BufReader::new(f)))
+
+                span.pb_set_length(f.metadata().await?.len());
+                span.pb_set_style(&tvix_tracing::PB_TRANSFER_STYLE);
+                span.pb_start();
+                Ok(Box::new(tokio::io::BufReader::new(InspectReader::new(
+                    f,
+                    move |d| {
+                        span.pb_inc(d.len() as u64);
+                    },
+                ))))
             }
             _ => {
                 let resp = self.http_client.get(url).send().await?;
+
+                if let Some(content_length) = resp.content_length() {
+                    span.pb_set_length(content_length);
+                    span.pb_set_style(&tvix_tracing::PB_TRANSFER_STYLE);
+                } else {
+                    span.pb_set_style(&tvix_tracing::PB_TRANSFER_STYLE);
+                }
+                span.pb_start();
+
                 Ok(Box::new(tokio_util::io::StreamReader::new(
-                    resp.bytes_stream().map_err(|e| {
-                        let e = e.without_url();
-                        warn!(%e, "failed to get response body");
-                        std::io::Error::new(std::io::ErrorKind::BrokenPipe, e)
-                    }),
+                    resp.bytes_stream()
+                        .inspect_ok(move |d| {
+                            span.pb_inc(d.len() as u64);
+                        })
+                        .map_err(|e| {
+                            let e = e.without_url();
+                            warn!(%e, "failed to get response body");
+                            std::io::Error::new(std::io::ErrorKind::BrokenPipe, e)
+                        }),
                 )))
             }
         }
@@ -190,7 +284,7 @@ where
     /// didn't match the previously communicated hash contained inside the FetchArgs.
     pub async fn ingest(&self, fetch: Fetch) -> Result<(Node, CAHash, u64), FetcherError> {
         match fetch {
-            Fetch::URL(url, exp_hash) => {
+            Fetch::URL { url, exp_hash } => {
                 // Construct an AsyncRead reading from the data as it's downloaded.
                 let mut r = self.download(url.clone()).await?;
 
@@ -199,7 +293,7 @@ where
 
                 // Copy the contents from the download reader to the blob writer.
                 // Calculate the digest of the file received, depending on the
-                // communicated expected hash (or sha256 if none provided).
+                // communicated expected hash algo (or sha256 if none provided).
                 let (actual_hash, blob_size) = match exp_hash
                     .as_ref()
                     .map(NixHash::algo)
@@ -233,17 +327,19 @@ where
 
                 // Construct and return the FileNode describing the downloaded contents.
                 Ok((
-                    Node::File(FileNode {
-                        name: vec![].into(),
-                        digest: blob_writer.close().await?.into(),
+                    Node::File {
+                        digest: blob_writer.close().await?,
                         size: blob_size,
                         executable: false,
-                    }),
+                    },
                     CAHash::Flat(actual_hash),
                     blob_size,
                 ))
             }
-            Fetch::Tarball(url, exp_nar_sha256) => {
+            Fetch::Tarball {
+                url,
+                exp_nar_sha256,
+            } => {
                 // Construct an AsyncRead reading from the data as it's downloaded.
                 let r = self.download(url.clone()).await?;
 
@@ -252,7 +348,7 @@ where
                 // Open the archive.
                 let archive = tokio_tar::Archive::new(r);
 
-                // Ingest the archive, get the root node
+                // Ingest the archive, get the root node.
                 let node = tvix_castore::import::archive::ingest_archive(
                     self.blob_service.clone(),
                     self.directory_service.clone(),
@@ -263,7 +359,7 @@ where
                 // If an expected NAR sha256 was provided, compare with the one
                 // calculated from our root node.
                 // Even if no expected NAR sha256 has been provided, we need
-                // the actual one later.
+                // the actual one to calculate the store path.
                 let (nar_size, actual_nar_sha256) = self
                     .nar_calculation_service
                     .calculate_nar(&node)
@@ -289,6 +385,155 @@ where
                     nar_size,
                 ))
             }
+            Fetch::NAR {
+                url,
+                hash: exp_hash,
+            } => {
+                // Construct an AsyncRead reading from the data as it's downloaded.
+                let r = self.download(url.clone()).await?;
+
+                // Pop compression.
+                let r = DecompressedReader::new(r);
+
+                // Wrap the reader, calculating our own hash.
+                let mut hasher: Box<dyn DynDigest + Send> = match exp_hash.algo() {
+                    HashAlgo::Md5 => Box::new(Md5::new()),
+                    HashAlgo::Sha1 => Box::new(Sha1::new()),
+                    HashAlgo::Sha256 => Box::new(Sha256::new()),
+                    HashAlgo::Sha512 => Box::new(Sha512::new()),
+                };
+                let mut r = tokio_util::io::InspectReader::new(r, |b| {
+                    hasher.update(b);
+                });
+
+                // Ingest the NAR, get the root node.
+                let (root_node, _actual_nar_sha256, actual_nar_size) =
+                    tvix_store::nar::ingest_nar_and_hash(
+                        self.blob_service.clone(),
+                        self.directory_service.clone(),
+                        &mut r,
+                    )
+                    .await
+                    .map_err(|e| FetcherError::Io(std::io::Error::other(e.to_string())))?;
+
+                // finalize the hasher.
+                let actual_hash = {
+                    match exp_hash.algo() {
+                        HashAlgo::Md5 => {
+                            NixHash::Md5(hasher.finalize().to_vec().try_into().unwrap())
+                        }
+                        HashAlgo::Sha1 => {
+                            NixHash::Sha1(hasher.finalize().to_vec().try_into().unwrap())
+                        }
+                        HashAlgo::Sha256 => {
+                            NixHash::Sha256(hasher.finalize().to_vec().try_into().unwrap())
+                        }
+                        HashAlgo::Sha512 => {
+                            NixHash::Sha512(hasher.finalize().to_vec().try_into().unwrap())
+                        }
+                    }
+                };
+
+                // Ensure the hash matches.
+                if exp_hash != actual_hash {
+                    return Err(FetcherError::HashMismatch {
+                        url,
+                        wanted: exp_hash,
+                        got: actual_hash,
+                    });
+                }
+                Ok((
+                    root_node,
+                    // use a CAHash::Nar with the algo from the input.
+                    CAHash::Nar(exp_hash),
+                    actual_nar_size,
+                ))
+            }
+            Fetch::Executable {
+                url,
+                hash: exp_hash,
+            } => {
+                // Construct an AsyncRead reading from the data as it's downloaded.
+                let mut r = self.download(url.clone()).await?;
+
+                // Construct an AsyncWrite to write into the BlobService.
+                let mut blob_writer = self.blob_service.open_write().await;
+
+                // Copy the contents from the download reader to the blob writer.
+                let file_size = tokio::io::copy(&mut r, &mut blob_writer).await?;
+                let blob_digest = blob_writer.close().await?;
+
+                // Render the NAR representation on-the-fly into a hash function with
+                // the same algo as our expected hash.
+                // We cannot do this upfront, as we don't know the actual size.
+                // FUTUREWORK: make opportunistic use of Content-Length header?
+
+                let w = tokio::io::sink();
+                // Construct the hash function.
+                let mut hasher: Box<dyn DynDigest + Send> = match exp_hash.algo() {
+                    HashAlgo::Md5 => Box::new(Md5::new()),
+                    HashAlgo::Sha1 => Box::new(Sha1::new()),
+                    HashAlgo::Sha256 => Box::new(Sha256::new()),
+                    HashAlgo::Sha512 => Box::new(Sha512::new()),
+                };
+
+                let mut nar_size: u64 = 0;
+                let mut w = InspectWriter::new(w, |d| {
+                    hasher.update(d);
+                    nar_size += d.len() as u64;
+                });
+
+                {
+                    let node = nix_compat::nar::writer::r#async::open(&mut w).await?;
+
+                    let blob_reader = self
+                        .blob_service
+                        .open_read(&blob_digest)
+                        .await?
+                        .expect("Tvix bug: just-uploaded blob not found");
+
+                    node.file(true, file_size, &mut BufReader::new(blob_reader))
+                        .await?;
+
+                    w.flush().await?;
+                }
+
+                // finalize the hasher.
+                let actual_hash = {
+                    match exp_hash.algo() {
+                        HashAlgo::Md5 => {
+                            NixHash::Md5(hasher.finalize().to_vec().try_into().unwrap())
+                        }
+                        HashAlgo::Sha1 => {
+                            NixHash::Sha1(hasher.finalize().to_vec().try_into().unwrap())
+                        }
+                        HashAlgo::Sha256 => {
+                            NixHash::Sha256(hasher.finalize().to_vec().try_into().unwrap())
+                        }
+                        HashAlgo::Sha512 => {
+                            NixHash::Sha512(hasher.finalize().to_vec().try_into().unwrap())
+                        }
+                    }
+                };
+
+                if exp_hash != actual_hash {
+                    return Err(FetcherError::HashMismatch {
+                        url,
+                        wanted: exp_hash,
+                        got: actual_hash,
+                    });
+                }
+
+                // Construct and return the FileNode describing the downloaded contents,
+                // make it executable.
+                let root_node = Node::File {
+                    digest: blob_digest,
+                    size: file_size,
+                    executable: true,
+                };
+
+                Ok((root_node, CAHash::Nar(actual_hash), file_size))
+            }
             Fetch::Git() => todo!(),
         }
     }
@@ -306,29 +551,30 @@ where
         // Fetch file, return the (unnamed) (File)Node of its contents, ca hash and filesize.
         let (node, ca_hash, size) = self.ingest(fetch).await?;
 
-        // Calculate the store path to return later, which is done with the ca_hash.
+        // Calculate the store path to return, derived from the ca_hash.
         let store_path = build_ca_path(name, &ca_hash, Vec::<String>::new(), false)?;
 
-        // Rename the node name to match the Store Path.
-        let node = node.rename(store_path.to_string().into());
-
         // If the resulting hash is not a CAHash::Nar, we also need to invoke
         // `calculate_nar` to calculate this representation, as it's required in
         // the [PathInfo].
+        // FUTUREWORK: allow ingest() to return multiple hashes, or have it feed
+        // nar_calculation_service too?
         let (nar_size, nar_sha256) = match &ca_hash {
-            CAHash::Flat(_nix_hash) => self
+            CAHash::Nar(NixHash::Sha256(nar_sha256)) => (size, *nar_sha256),
+            CAHash::Nar(_) | CAHash::Flat(_) => self
                 .nar_calculation_service
                 .calculate_nar(&node)
                 .await
                 .map_err(|e| FetcherError::Io(e.into()))?,
-            CAHash::Nar(NixHash::Sha256(nar_sha256)) => (size, *nar_sha256),
-            CAHash::Nar(_) => unreachable!("Tvix bug: fetch returned non-sha256 CAHash::Nar"),
             CAHash::Text(_) => unreachable!("Tvix bug: fetch returned CAHash::Text"),
         };
 
         // Construct the PathInfo and persist it.
         let path_info = PathInfo {
-            node: Some(tvix_castore::proto::Node { node: Some(node) }),
+            node: Some(tvix_castore::proto::Node::from_name_and_node(
+                store_path.to_string().into(),
+                node.clone(),
+            )),
             references: vec![],
             narinfo: Some(tvix_store::proto::NarInfo {
                 nar_size,
@@ -340,18 +586,18 @@ where
             }),
         };
 
-        let path_info = self
-            .path_info_service
+        self.path_info_service
             .put(path_info)
             .await
             .map_err(|e| FetcherError::Io(e.into()))?;
 
-        Ok((store_path, path_info.node.unwrap().node.unwrap()))
+        Ok((store_path, node))
     }
 }
 
 /// Attempts to mimic `nix::libutil::baseNameOf`
-pub(crate) fn url_basename(s: &str) -> &str {
+pub(crate) fn url_basename(url: &Url) -> &str {
+    let s = url.path();
     if s.is_empty() {
         return "";
     }
@@ -382,38 +628,82 @@ pub(crate) fn url_basename(s: &str) -> &str {
 #[cfg(test)]
 mod tests {
     mod fetch {
-        use nix_compat::nixbase32;
-
-        use crate::fetchers::Fetch;
-
         use super::super::*;
-
-        #[test]
-        fn fetchurl_store_path() {
-            let url = Url::parse("https://raw.githubusercontent.com/aaptel/notmuch-extract-patch/f732a53e12a7c91a06755ebfab2007adc9b3063b/notmuch-extract-patch").unwrap();
-            let exp_hash = NixHash::Sha256(
-                nixbase32::decode_fixed("0nawkl04sj7psw6ikzay7kydj3dhd0fkwghcsf5rzaw4bmp4kbax")
-                    .unwrap(),
-            );
-
-            let fetch = Fetch::URL(url, Some(exp_hash));
+        use crate::fetchers::Fetch;
+        use nix_compat::{nixbase32, nixhash};
+        use rstest::rstest;
+
+        #[rstest]
+        #[case::url_no_hash(
+            Fetch::URL{
+                url: Url::parse("https://raw.githubusercontent.com/aaptel/notmuch-extract-patch/f732a53e12a7c91a06755ebfab2007adc9b3063b/notmuch-extract-patch").unwrap(),
+                exp_hash: None,
+            },
+            None,
+            "notmuch-extract-patch"
+        )]
+        #[case::url_sha256(
+            Fetch::URL{
+                url: Url::parse("https://raw.githubusercontent.com/aaptel/notmuch-extract-patch/f732a53e12a7c91a06755ebfab2007adc9b3063b/notmuch-extract-patch").unwrap(),
+                exp_hash: Some(nixhash::from_sri_str("sha256-Xa1Jbl2Eq5+L0ww+Ph1osA3Z/Dxe/RkN1/dITQCdXFk=").unwrap()),
+            },
+            Some(StorePathRef::from_bytes(b"06qi00hylriyfm0nl827crgjvbax84mz-notmuch-extract-patch").unwrap()),
+            "notmuch-extract-patch"
+        )]
+        #[case::url_custom_name(
+            Fetch::URL{
+                url: Url::parse("https://test.example/owo").unwrap(),
+                exp_hash: Some(nixhash::from_sri_str("sha256-Xa1Jbl2Eq5+L0ww+Ph1osA3Z/Dxe/RkN1/dITQCdXFk=").unwrap()),
+            },
+            Some(StorePathRef::from_bytes(b"06qi00hylriyfm0nl827crgjvbax84mz-notmuch-extract-patch").unwrap()),
+            "notmuch-extract-patch"
+        )]
+        #[case::nar_sha256(
+            Fetch::NAR{
+                url: Url::parse("https://cache.nixos.org/nar/0r8nqa1klm5v17ifc6z96m9wywxkjvgbnqq9pmy0sgqj53wj3n12.nar.xz").unwrap(),
+                hash: nixhash::from_sri_str("sha256-oj6yfWKbcEerK8D9GdPJtIAOveNcsH1ztGeSARGypRA=").unwrap(),
+            },
+            Some(StorePathRef::from_bytes(b"b40vjphshq4fdgv8s3yrp0bdlafi4920-0r8nqa1klm5v17ifc6z96m9wywxkjvgbnqq9pmy0sgqj53wj3n12.nar.xz").unwrap()),
+            "0r8nqa1klm5v17ifc6z96m9wywxkjvgbnqq9pmy0sgqj53wj3n12.nar.xz"
+        )]
+        #[case::nar_sha1(
+            Fetch::NAR{
+                url: Url::parse("https://cache.nixos.org/nar/0r8nqa1klm5v17ifc6z96m9wywxkjvgbnqq9pmy0sgqj53wj3n12.nar.xz").unwrap(),
+                hash: nixhash::from_sri_str("sha1-F/fMsgwkXF8fPCg1v9zPZ4yOFIA=").unwrap(),
+            },
+            Some(StorePathRef::from_bytes(b"8kx7fdkdbzs4fkfb57xq0cbhs20ymq2n-0r8nqa1klm5v17ifc6z96m9wywxkjvgbnqq9pmy0sgqj53wj3n12.nar.xz").unwrap()),
+            "0r8nqa1klm5v17ifc6z96m9wywxkjvgbnqq9pmy0sgqj53wj3n12.nar.xz"
+        )]
+        #[case::executable_sha1(
+            Fetch::Executable{
+                url: Url::parse("https://cache.nixos.org/nar/0r8nqa1klm5v17ifc6z96m9wywxkjvgbnqq9pmy0sgqj53wj3n12.nar.xz").unwrap(),
+                hash: nixhash::from_sri_str("sha1-NKNeU1csW5YJ4lCeWH3Z/apppNU=").unwrap(),
+            },
+            Some(StorePathRef::from_bytes(b"y92hm2xfk1009hrq0ix80j4m5k4j4w21-0r8nqa1klm5v17ifc6z96m9wywxkjvgbnqq9pmy0sgqj53wj3n12.nar.xz").unwrap()),
+            "0r8nqa1klm5v17ifc6z96m9wywxkjvgbnqq9pmy0sgqj53wj3n12.nar.xz"
+        )]
+        fn fetch_store_path(
+            #[case] fetch: Fetch,
+            #[case] exp_path: Option<StorePathRef>,
+            #[case] name: &str,
+        ) {
             assert_eq!(
-                "06qi00hylriyfm0nl827crgjvbax84mz-notmuch-extract-patch",
-                &fetch
-                    .store_path("notmuch-extract-patch")
-                    .unwrap()
-                    .unwrap()
-                    .to_string(),
-            )
+                exp_path,
+                fetch.store_path(name).expect("invalid name"),
+                "unexpected calculated store path"
+            );
         }
 
         #[test]
         fn fetch_tarball_store_path() {
             let url = Url::parse("https://github.com/NixOS/nixpkgs/archive/91050ea1e57e50388fa87a3302ba12d188ef723a.tar.gz").unwrap();
-            let exp_nixbase32 =
+            let exp_sha256 =
                 nixbase32::decode_fixed("1hf6cgaci1n186kkkjq106ryf8mmlq9vnwgfwh625wa8hfgdn4dm")
                     .unwrap();
-            let fetch = Fetch::Tarball(url, Some(exp_nixbase32));
+            let fetch = Fetch::Tarball {
+                url,
+                exp_nar_sha256: Some(exp_sha256),
+            };
 
             assert_eq!(
                 "7adgvk5zdfq4pwrhsm3n9lzypb12gw0g-source",
@@ -424,30 +714,18 @@ mod tests {
 
     mod url_basename {
         use super::super::*;
-
-        #[test]
-        fn empty_path() {
-            assert_eq!(url_basename(""), "");
-        }
-
-        #[test]
-        fn path_on_root() {
-            assert_eq!(url_basename("/dir"), "dir");
-        }
-
-        #[test]
-        fn relative_path() {
-            assert_eq!(url_basename("dir/foo"), "foo");
-        }
-
-        #[test]
-        fn root_with_trailing_slash() {
-            assert_eq!(url_basename("/"), "");
-        }
-
-        #[test]
-        fn trailing_slash() {
-            assert_eq!(url_basename("/dir/"), "dir");
+        use rstest::rstest;
+
+        #[rstest]
+        #[case::empty_path("", "")]
+        #[case::path_on_root("/dir", "dir")]
+        #[case::relative_path("dir/foo", "foo")]
+        #[case::root_with_trailing_slash("/", "")]
+        #[case::trailing_slash("/dir/", "dir")]
+        fn test_url_basename(#[case] url_path: &str, #[case] exp_basename: &str) {
+            let mut url = Url::parse("http://localhost").expect("invalid url");
+            url.set_path(url_path);
+            assert_eq!(url_basename(&url), exp_basename);
         }
     }
 }
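
Both the `NAR` and `Executable` arms build a `Box<dyn DynDigest>` from the expected hash's algorithm and later re-match on the algorithm to wrap the digest into a `NixHash`. A hedged sketch of how that duplicated dispatch could be factored into two helpers (hypothetical free functions, not part of this change):

    use md5::{digest::DynDigest, Md5};
    use nix_compat::nixhash::{HashAlgo, NixHash};
    use sha1::Sha1;
    use sha2::{Digest, Sha256, Sha512};

    /// Construct a boxed hasher matching the given algorithm.
    fn make_hasher(algo: HashAlgo) -> Box<dyn DynDigest + Send> {
        match algo {
            HashAlgo::Md5 => Box::new(Md5::new()),
            HashAlgo::Sha1 => Box::new(Sha1::new()),
            HashAlgo::Sha256 => Box::new(Sha256::new()),
            HashAlgo::Sha512 => Box::new(Sha512::new()),
        }
    }

    /// Finalize the hasher and wrap the digest into the matching NixHash variant.
    fn finalize_hash(algo: HashAlgo, hasher: Box<dyn DynDigest + Send>) -> NixHash {
        let digest = hasher.finalize().to_vec();
        match algo {
            HashAlgo::Md5 => NixHash::Md5(digest.try_into().unwrap()),
            HashAlgo::Sha1 => NixHash::Sha1(digest.try_into().unwrap()),
            HashAlgo::Sha256 => NixHash::Sha256(digest.try_into().unwrap()),
            HashAlgo::Sha512 => NixHash::Sha512(digest.try_into().unwrap()),
        }
    }
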
diff --git a/tvix/glue/src/fetchurl.rs b/tvix/glue/src/fetchurl.rs
new file mode 100644
index 000000000000..9f57868b1991
--- /dev/null
+++ b/tvix/glue/src/fetchurl.rs
@@ -0,0 +1,82 @@
+//! This contains the code translating from a `builtin:fetchurl` [Derivation]
+//! to a [Fetch].
+use crate::fetchers::Fetch;
+use nix_compat::{derivation::Derivation, nixhash::CAHash};
+use tracing::instrument;
+use url::Url;
+
+/// Takes a derivation produced by a call to `builtin:fetchurl` and returns the
+/// synthesized [Fetch] for it, as well as the name.
+#[instrument]
+pub(crate) fn fetchurl_derivation_to_fetch(drv: &Derivation) -> Result<(String, Fetch), Error> {
+    if drv.builder != "builtin:fetchurl" {
+        return Err(Error::BuilderInvalid);
+    }
+    if !drv.arguments.is_empty() {
+        return Err(Error::ArgumentsInvalid);
+    }
+    if drv.system != "builtin" {
+        return Err(Error::SystemInvalid);
+    }
+
+    // ensure this is a fixed-output derivation
+    if drv.outputs.len() != 1 {
+        return Err(Error::NoFOD);
+    }
+    let out_output = &drv.outputs.get("out").ok_or(Error::NoFOD)?;
+    let ca_hash = out_output.ca_hash.clone().ok_or(Error::NoFOD)?;
+
+    let name: String = drv
+        .environment
+        .get("name")
+        .ok_or(Error::NameMissing)?
+        .to_owned()
+        .try_into()
+        .map_err(|_| Error::NameInvalid)?;
+
+    let url: Url = std::str::from_utf8(drv.environment.get("url").ok_or(Error::URLMissing)?)
+        .map_err(|_| Error::URLInvalid)?
+        .parse()
+        .map_err(|_| Error::URLInvalid)?;
+
+    match ca_hash {
+        CAHash::Flat(hash) => {
+            return Ok((
+                name,
+                Fetch::URL {
+                    url,
+                    exp_hash: Some(hash),
+                },
+            ))
+        }
+        CAHash::Nar(hash) => {
+            if drv.environment.get("executable").map(|v| v.as_slice()) == Some(b"1") {
+                Ok((name, Fetch::Executable { url, hash }))
+            } else {
+                Ok((name, Fetch::NAR { url, hash }))
+            }
+        }
+        // you can't construct derivations containing this
+        CAHash::Text(_) => panic!("Tvix bug: got CAHash::Text in drv"),
+    }
+}
+
+#[derive(Debug, thiserror::Error)]
+pub(crate) enum Error {
+    #[error("Invalid builder")]
+    BuilderInvalid,
+    #[error("invalid arguments")]
+    ArgumentsInvalid,
+    #[error("Invalid system")]
+    SystemInvalid,
+    #[error("Derivation is not fixed-output")]
+    NoFOD,
+    #[error("Missing URL")]
+    URLMissing,
+    #[error("Invalid URL")]
+    URLInvalid,
+    #[error("Missing Name")]
+    NameMissing,
+    #[error("Name invalid")]
+    NameInvalid,
+}
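
The dispatch at the end is the crux: a `CAHash::Flat` output hash means plain `fetchurl` semantics, while `CAHash::Nar` means the output is hashed in NAR form, with `executable = "1"` in the environment selecting `Fetch::Executable` over `Fetch::NAR`. A compact restatement of that rule as a hypothetical helper mirroring the match above:

    use crate::fetchers::Fetch;
    use nix_compat::nixhash::CAHash;
    use url::Url;

    /// Mirrors the CAHash dispatch in fetchurl_derivation_to_fetch.
    fn classify(url: Url, ca_hash: CAHash, executable: bool) -> Fetch {
        match ca_hash {
            CAHash::Flat(hash) => Fetch::URL { url, exp_hash: Some(hash) },
            CAHash::Nar(hash) if executable => Fetch::Executable { url, hash },
            CAHash::Nar(hash) => Fetch::NAR { url, hash },
            // Not constructible from a derivation.
            CAHash::Text(_) => unreachable!(),
        }
    }
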
diff --git a/tvix/glue/src/known_paths.rs b/tvix/glue/src/known_paths.rs
index 290c9d5b6988..239acca9829a 100644
--- a/tvix/glue/src/known_paths.rs
+++ b/tvix/glue/src/known_paths.rs
@@ -25,27 +25,27 @@ pub struct KnownPaths {
     ///
     /// Keys are derivation paths, values are a tuple of the "hash derivation
     /// modulo" and the Derivation struct itself.
-    derivations: HashMap<StorePath, ([u8; 32], Derivation)>,
+    derivations: HashMap<StorePath<String>, ([u8; 32], Derivation)>,
 
     /// A map from output path to (one) drv path.
     /// Note that in the case of FODs, multiple drvs can produce the same output
     /// path. We use one of them.
-    outputs_to_drvpath: HashMap<StorePath, StorePath>,
+    outputs_to_drvpath: HashMap<StorePath<String>, StorePath<String>>,
 
     /// A map from output path to fetches (and their names).
-    outputs_to_fetches: HashMap<StorePath, (String, Fetch)>,
+    outputs_to_fetches: HashMap<StorePath<String>, (String, Fetch)>,
 }
 
 impl KnownPaths {
     /// Fetch the opaque "hash derivation modulo" for a given derivation path.
-    pub fn get_hash_derivation_modulo(&self, drv_path: &StorePath) -> Option<&[u8; 32]> {
+    pub fn get_hash_derivation_modulo(&self, drv_path: &StorePath<String>) -> Option<&[u8; 32]> {
         self.derivations
             .get(drv_path)
             .map(|(hash_derivation_modulo, _derivation)| hash_derivation_modulo)
     }
 
     /// Return a reference to the Derivation for a given drv path.
-    pub fn get_drv_by_drvpath(&self, drv_path: &StorePath) -> Option<&Derivation> {
+    pub fn get_drv_by_drvpath(&self, drv_path: &StorePath<String>) -> Option<&Derivation> {
         self.derivations
             .get(drv_path)
             .map(|(_hash_derivation_modulo, derivation)| derivation)
@@ -54,7 +54,10 @@ impl KnownPaths {
     /// Return the drv path of the derivation producing the passed output path.
     /// Note there can be multiple Derivations producing the same output path in
     /// flight; this function will only return one of them.
-    pub fn get_drv_path_for_output_path(&self, output_path: &StorePath) -> Option<&StorePath> {
+    pub fn get_drv_path_for_output_path(
+        &self,
+        output_path: &StorePath<String>,
+    ) -> Option<&StorePath<String>> {
         self.outputs_to_drvpath.get(output_path)
     }
 
@@ -63,7 +66,7 @@ impl KnownPaths {
     /// be fully calculated.
     /// All input derivations this refers to must also be inserted to this
     /// struct.
-    pub fn add_derivation(&mut self, drv_path: StorePath, drv: Derivation) {
+    pub fn add_derivation(&mut self, drv_path: StorePath<String>, drv: Derivation) {
         // check input derivations to have been inserted.
         #[cfg(debug_assertions)]
         {
@@ -124,16 +127,24 @@ impl KnownPaths {
 
     /// Return the name and fetch producing the passed output path.
     /// Note there can also be (multiple) Derivations producing the same output path.
-    pub fn get_fetch_for_output_path(&self, output_path: &StorePath) -> Option<(String, Fetch)> {
+    pub fn get_fetch_for_output_path(
+        &self,
+        output_path: &StorePath<String>,
+    ) -> Option<(String, Fetch)> {
         self.outputs_to_fetches
             .get(output_path)
             .map(|(name, fetch)| (name.to_owned(), fetch.to_owned()))
     }
+
+    /// Returns an iterator over all known derivations and their store path.
+    pub fn get_derivations(&self) -> impl Iterator<Item = (&StorePath<String>, &Derivation)> {
+        self.derivations.iter().map(|(k, v)| (k, &v.1))
+    }
 }
 
 #[cfg(test)]
 mod tests {
-    use nix_compat::{derivation::Derivation, nixbase32, nixhash::NixHash, store_path::StorePath};
+    use nix_compat::{derivation::Derivation, nixbase32, nixhash, store_path::StorePath};
     use url::Url;
 
     use crate::fetchers::Fetch;
@@ -151,26 +162,26 @@ mod tests {
             "tests/ch49594n9avinrf8ip0aslidkc4lxkqv-foo.drv"
         ))
         .expect("must parse");
-        static ref BAR_DRV_PATH: StorePath =
+        static ref BAR_DRV_PATH: StorePath<String> =
             StorePath::from_bytes(b"ss2p4wmxijn652haqyd7dckxwl4c7hxx-bar.drv").expect("must parse");
-        static ref FOO_DRV_PATH: StorePath =
+        static ref FOO_DRV_PATH: StorePath<String> =
             StorePath::from_bytes(b"ch49594n9avinrf8ip0aslidkc4lxkqv-foo.drv").expect("must parse");
-        static ref BAR_OUT_PATH: StorePath =
+        static ref BAR_OUT_PATH: StorePath<String> =
             StorePath::from_bytes(b"mp57d33657rf34lzvlbpfa1gjfv5gmpg-bar").expect("must parse");
-        static ref FOO_OUT_PATH: StorePath =
+        static ref FOO_OUT_PATH: StorePath<String> =
             StorePath::from_bytes(b"fhaj6gmwns62s6ypkcldbaj2ybvkhx3p-foo").expect("must parse");
 
-        static ref FETCH_URL : Fetch = Fetch::URL(
-            Url::parse("https://raw.githubusercontent.com/aaptel/notmuch-extract-patch/f732a53e12a7c91a06755ebfab2007adc9b3063b/notmuch-extract-patch").unwrap(),
-            Some(NixHash::Sha256(nixbase32::decode_fixed("0nawkl04sj7psw6ikzay7kydj3dhd0fkwghcsf5rzaw4bmp4kbax").unwrap()))
-        );
-        static ref FETCH_URL_OUT_PATH: StorePath = StorePath::from_bytes(b"06qi00hylriyfm0nl827crgjvbax84mz-notmuch-extract-patch").unwrap();
-
-        static ref FETCH_TARBALL : Fetch = Fetch::Tarball(
-            Url::parse("https://github.com/NixOS/nixpkgs/archive/91050ea1e57e50388fa87a3302ba12d188ef723a.tar.gz").unwrap(),
-            Some(nixbase32::decode_fixed("1hf6cgaci1n186kkkjq106ryf8mmlq9vnwgfwh625wa8hfgdn4dm").unwrap())
-        );
-        static ref FETCH_TARBALL_OUT_PATH: StorePath = StorePath::from_bytes(b"7adgvk5zdfq4pwrhsm3n9lzypb12gw0g-source").unwrap();
+        static ref FETCH_URL : Fetch = Fetch::URL{
+            url: Url::parse("https://raw.githubusercontent.com/aaptel/notmuch-extract-patch/f732a53e12a7c91a06755ebfab2007adc9b3063b/notmuch-extract-patch").unwrap(),
+            exp_hash: Some(nixhash::from_sri_str("sha256-Xa1Jbl2Eq5+L0ww+Ph1osA3Z/Dxe/RkN1/dITQCdXFk=").unwrap())
+        };
+        static ref FETCH_URL_OUT_PATH: StorePath<String> = StorePath::from_bytes(b"06qi00hylriyfm0nl827crgjvbax84mz-notmuch-extract-patch").unwrap();
+
+        static ref FETCH_TARBALL : Fetch = Fetch::Tarball{
+            url: Url::parse("https://github.com/NixOS/nixpkgs/archive/91050ea1e57e50388fa87a3302ba12d188ef723a.tar.gz").unwrap(),
+            exp_nar_sha256: Some(nixbase32::decode_fixed("1hf6cgaci1n186kkkjq106ryf8mmlq9vnwgfwh625wa8hfgdn4dm").unwrap())
+        };
+        static ref FETCH_TARBALL_OUT_PATH: StorePath<String> = StorePath::from_bytes(b"7adgvk5zdfq4pwrhsm3n9lzypb12gw0g-source").unwrap();
     }
 
     /// ensure we don't allow adding a Derivation that depends on another,
@@ -267,22 +278,22 @@ mod tests {
                 .unwrap()
                 .to_owned()
         );
+    }
 
-        // We should be able to get these fetches out, when asking for their out path.
-        let (got_name, got_fetch) = known_paths
-            .get_fetch_for_output_path(&FETCH_URL_OUT_PATH)
-            .expect("must be some");
-
-        assert_eq!("notmuch-extract-patch", got_name);
-        assert_eq!(FETCH_URL.clone(), got_fetch);
+    #[test]
+    fn get_derivations_working() {
+        let mut known_paths = KnownPaths::default();
 
-        // โ€ฆ multiple times.
-        let (got_name, got_fetch) = known_paths
-            .get_fetch_for_output_path(&FETCH_URL_OUT_PATH)
-            .expect("must be some");
+        // Add BAR_DRV
+        known_paths.add_derivation(BAR_DRV_PATH.clone(), BAR_DRV.clone());
 
-        assert_eq!("notmuch-extract-patch", got_name);
-        assert_eq!(FETCH_URL.clone(), got_fetch);
+        // We should be able to find BAR_DRV_PATH and BAR_DRV as a pair in get_derivations.
+        assert_eq!(
+            Some((&BAR_DRV_PATH.clone(), &BAR_DRV.clone())),
+            known_paths
+                .get_derivations()
+                .find(|(s, d)| (*s, *d) == (&BAR_DRV_PATH, &BAR_DRV))
+        );
     }
 
     // TODO: add test panicking about missing digest
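
A minimal sketch of the reworked Fetch construction (the URLs here are hypothetical; the hash values are the fixtures above): `Fetch::URL` carries the expected hash of the fetched file, parsed from an SRI string, while `Fetch::Tarball` pins the sha256 of the NAR representation of the unpacked tree.

    use nix_compat::{nixbase32, nixhash};
    use url::Url;

    // Struct variant with named fields; exp_hash may use any supported algo.
    let fetch_url = Fetch::URL {
        url: Url::parse("https://test.example/owo").unwrap(),
        exp_hash: Some(
            nixhash::from_sri_str("sha256-Xa1Jbl2Eq5+L0ww+Ph1osA3Z/Dxe/RkN1/dITQCdXFk=").unwrap(),
        ),
    };

    // For tarballs, the expected hash is over the NAR of the unpacked tree.
    let fetch_tarball = Fetch::Tarball {
        url: Url::parse("https://test.example/src.tar.gz").unwrap(),
        exp_nar_sha256: Some(
            nixbase32::decode_fixed("1hf6cgaci1n186kkkjq106ryf8mmlq9vnwgfwh625wa8hfgdn4dm").unwrap(),
        ),
    };
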
diff --git a/tvix/glue/src/lib.rs b/tvix/glue/src/lib.rs
index 2e5a3be103a1..f6f99f3c32a3 100644
--- a/tvix/glue/src/lib.rs
+++ b/tvix/glue/src/lib.rs
@@ -6,18 +6,22 @@ pub mod tvix_build;
 pub mod tvix_io;
 pub mod tvix_store_io;
 
+mod fetchurl;
+
 #[cfg(test)]
 mod tests;
 
 /// Tell the Evaluator to resolve `<nix>` to the path `/__corepkgs__`,
 /// which has special handling in [tvix_io::TvixIO].
 /// This is used in nixpkgs to import `fetchurl.nix` from `<nix>`.
-pub fn configure_nix_path<IO>(
-    eval: &mut tvix_eval::Evaluation<IO>,
+pub fn configure_nix_path<'co, 'ro, 'env, IO>(
+    eval_builder: tvix_eval::EvaluationBuilder<'co, 'ro, 'env, IO>,
     nix_search_path: &Option<String>,
-) {
-    eval.nix_path = nix_search_path
-        .as_ref()
-        .map(|p| format!("nix=/__corepkgs__:{}", p))
-        .or_else(|| Some("nix=/__corepkgs__".to_string()));
+) -> tvix_eval::EvaluationBuilder<'co, 'ro, 'env, IO> {
+    eval_builder.nix_path(
+        nix_search_path
+            .as_ref()
+            .map(|p| format!("nix=/__corepkgs__:{}", p))
+            .or_else(|| Some("nix=/__corepkgs__".to_string())),
+    )
 }
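
Since `configure_nix_path` now consumes and returns the builder, callers chain it with the other setup steps. A minimal usage sketch, assuming some `io: Box<dyn EvalIO>` like the test modules further down construct:

    let eval_builder = tvix_eval::Evaluation::builder(io);
    let eval_builder = configure_nix_path(eval_builder, &Some("nixpkgs=/path/to/nixpkgs".into()));
    let eval = eval_builder.build();
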
diff --git a/tvix/glue/src/refscan.rs b/tvix/glue/src/refscan.rs
index 0e0bb6c77828..80a126349746 100644
--- a/tvix/glue/src/refscan.rs
+++ b/tvix/glue/src/refscan.rs
@@ -6,62 +6,250 @@
 //!
 //! The scanner itself uses the Wu-Manber string-matching algorithm, via
 //! our fork of the `wu-manber` crate.
-
+use pin_project::pin_project;
 use std::collections::BTreeSet;
+use std::pin::Pin;
+use std::sync::Arc;
+use std::task::{ready, Poll};
+use tokio::io::{AsyncBufRead, AsyncRead, ReadBuf};
 use wu_manber::TwoByteWM;
 
-pub const STORE_PATH_LEN: usize = "/nix/store/00000000000000000000000000000000".len();
-
-/// Represents a "primed" reference scanner with an automaton that knows the set
-/// of store paths to scan for.
-pub struct ReferenceScanner<P: Ord + AsRef<[u8]>> {
+/// A searcher that encapsulates the candidates and the Wu-Manber searcher.
+/// This is separate from the scanner because we need to look for the same
+/// pattern in multiple outputs and don't want to pay the price of constructing
+/// the searcher for each build output.
+pub struct ReferencePatternInner<P> {
     candidates: Vec<P>,
+    longest_candidate: usize,
+    // FUTUREWORK: Support overlapping patterns to be compatible with C++ Nix
     searcher: Option<TwoByteWM>,
-    matches: Vec<usize>,
 }
 
-impl<P: Clone + Ord + AsRef<[u8]>> ReferenceScanner<P> {
-    /// Construct a new `ReferenceScanner` that knows how to scan for the given
-    /// candidate store paths.
+#[derive(Clone)]
+pub struct ReferencePattern<P> {
+    inner: Arc<ReferencePatternInner<P>>,
+}
+
+impl<P> ReferencePattern<P> {
+    pub fn candidates(&self) -> &[P] {
+        &self.inner.candidates
+    }
+
+    pub fn longest_candidate(&self) -> usize {
+        self.inner.longest_candidate
+    }
+}
+
+impl<P: AsRef<[u8]>> ReferencePattern<P> {
+    /// Construct a new `ReferencePattern` that knows how to scan for the given
+    /// candidates.
     pub fn new(candidates: Vec<P>) -> Self {
         let searcher = if candidates.is_empty() {
             None
         } else {
             Some(TwoByteWM::new(&candidates))
         };
+        let longest_candidate = candidates.iter().fold(0, |v, c| v.max(c.as_ref().len()));
 
-        ReferenceScanner {
-            searcher,
-            candidates,
-            matches: Default::default(),
+        ReferencePattern {
+            inner: Arc::new(ReferencePatternInner {
+                searcher,
+                candidates,
+                longest_candidate,
+            }),
         }
     }
+}
+
+impl<P> From<Vec<P>> for ReferencePattern<P>
+where
+    P: AsRef<[u8]>,
+{
+    fn from(candidates: Vec<P>) -> Self {
+        Self::new(candidates)
+    }
+}
+
+/// Represents a "primed" reference scanner with an automaton that knows the set
+/// of byte patterns to scan for.
+pub struct ReferenceScanner<P> {
+    pattern: ReferencePattern<P>,
+    matches: Vec<bool>,
+}
+
+impl<P: AsRef<[u8]>> ReferenceScanner<P> {
+    /// Construct a new `ReferenceScanner` that knows how to scan for the given
+    /// candidate byte patterns.
+    pub fn new<IP: Into<ReferencePattern<P>>>(pattern: IP) -> Self {
+        let pattern = pattern.into();
+        let matches = vec![false; pattern.candidates().len()];
+        ReferenceScanner { pattern, matches }
+    }
 
-    /// Scan the given str for all non-overlapping matches and collect them
+    /// Scan the given buffer for all non-overlapping matches and collect them
     /// in the scanner.
     pub fn scan<S: AsRef<[u8]>>(&mut self, haystack: S) {
-        if haystack.as_ref().len() < STORE_PATH_LEN {
+        if haystack.as_ref().len() < self.pattern.longest_candidate() {
             return;
         }
 
-        if let Some(searcher) = &self.searcher {
+        if let Some(searcher) = &self.pattern.inner.searcher {
             for m in searcher.find(haystack) {
-                self.matches.push(m.pat_idx);
+                self.matches[m.pat_idx] = true;
             }
         }
     }
 
+    pub fn pattern(&self) -> &ReferencePattern<P> {
+        &self.pattern
+    }
+
+    pub fn matches(&self) -> &[bool] {
+        &self.matches
+    }
+
+    pub fn candidate_matches(&self) -> impl Iterator<Item = &P> {
+        let candidates = self.pattern.candidates();
+        self.matches.iter().enumerate().filter_map(|(idx, found)| {
+            if *found {
+                Some(&candidates[idx])
+            } else {
+                None
+            }
+        })
+    }
+}
+
+impl<P: Clone + Ord + AsRef<[u8]>> ReferenceScanner<P> {
     /// Finalise the reference scanner and return the resulting matches.
     pub fn finalise(self) -> BTreeSet<P> {
-        self.matches
-            .into_iter()
-            .map(|idx| self.candidates[idx].clone())
-            .collect()
+        self.candidate_matches().cloned().collect()
+    }
+}
+
+const DEFAULT_BUF_SIZE: usize = 8 * 1024;
+
+#[pin_project]
+pub struct ReferenceReader<P, R> {
+    scanner: ReferenceScanner<P>,
+    buffer: Vec<u8>,
+    consumed: usize,
+    #[pin]
+    reader: R,
+}
+
+impl<P, R> ReferenceReader<P, R>
+where
+    P: AsRef<[u8]>,
+{
+    pub fn new(pattern: ReferencePattern<P>, reader: R) -> ReferenceReader<P, R> {
+        Self::with_capacity(DEFAULT_BUF_SIZE, pattern, reader)
+    }
+
+    pub fn with_capacity(
+        capacity: usize,
+        pattern: ReferencePattern<P>,
+        reader: R,
+    ) -> ReferenceReader<P, R> {
+        // If capacity is not at least as long as longest_candidate, we can't do a scan.
+        let capacity = capacity.max(pattern.longest_candidate());
+        ReferenceReader {
+            scanner: ReferenceScanner::new(pattern),
+            buffer: Vec::with_capacity(capacity),
+            consumed: 0,
+            reader,
+        }
+    }
+
+    pub fn scanner(&self) -> &ReferenceScanner<P> {
+        &self.scanner
+    }
+}
+
+impl<P, R> ReferenceReader<P, R>
+where
+    P: Clone + Ord + AsRef<[u8]>,
+{
+    pub fn finalise(self) -> BTreeSet<P> {
+        self.scanner.finalise()
+    }
+}
+
+impl<P, R> AsyncRead for ReferenceReader<P, R>
+where
+    R: AsyncRead,
+    P: AsRef<[u8]>,
+{
+    fn poll_read(
+        mut self: Pin<&mut Self>,
+        cx: &mut std::task::Context<'_>,
+        buf: &mut tokio::io::ReadBuf<'_>,
+    ) -> Poll<std::io::Result<()>> {
+        let internal_buf = ready!(self.as_mut().poll_fill_buf(cx))?;
+        let amt = buf.remaining().min(internal_buf.len());
+        buf.put_slice(&internal_buf[..amt]);
+        self.consume(amt);
+        Poll::Ready(Ok(()))
+    }
+}
+
+impl<P, R> AsyncBufRead for ReferenceReader<P, R>
+where
+    R: AsyncRead,
+    P: AsRef<[u8]>,
+{
+    fn poll_fill_buf(
+        self: Pin<&mut Self>,
+        cx: &mut std::task::Context<'_>,
+    ) -> Poll<std::io::Result<&[u8]>> {
+        let overlap = self.scanner.pattern.longest_candidate() - 1;
+        let mut this = self.project();
+        // Still data in buffer
+        if *this.consumed < this.buffer.len() {
+            return Poll::Ready(Ok(&this.buffer[*this.consumed..]));
+        }
+        // We need to copy the last `overlap` bytes to the front to handle references that span read boundaries
+        if *this.consumed > overlap {
+            let start = this.buffer.len() - overlap;
+            this.buffer.copy_within(start.., 0);
+            this.buffer.truncate(overlap);
+            *this.consumed = overlap;
+        }
+        // Read at least until self.buffer.len() > overlap so we can do one scan
+        loop {
+            let filled = {
+                let mut buf = ReadBuf::uninit(this.buffer.spare_capacity_mut());
+                ready!(this.reader.as_mut().poll_read(cx, &mut buf))?;
+                buf.filled().len()
+            };
+            // SAFETY: We just read `filled` amount of data above
+            unsafe {
+                this.buffer.set_len(filled + this.buffer.len());
+            }
+            if filled == 0 || this.buffer.len() > overlap {
+                break;
+            }
+        }
+
+        #[allow(clippy::needless_borrows_for_generic_args)] // misfiring lint (breaks code below)
+        this.scanner.scan(&this.buffer);
+
+        Poll::Ready(Ok(&this.buffer[*this.consumed..]))
+    }
+
+    fn consume(self: Pin<&mut Self>, amt: usize) {
+        debug_assert!(self.consumed + amt <= self.buffer.len());
+        let this = self.project();
+        *this.consumed += amt;
     }
 }
 
 #[cfg(test)]
 mod tests {
+    use rstest::rstest;
+    use tokio::io::AsyncReadExt as _;
+    use tokio_test::io::Builder;
+
     use super::*;
 
     // The actual derivation of `nixpkgs.hello`.
@@ -112,4 +300,40 @@ mod tests {
             assert!(result.contains(c));
         }
     }
+
+    #[rstest]
+    #[case::normal(8096, 8096)]
+    #[case::small_capacity(8096, 1)]
+    #[case::small_read(1, 8096)]
+    #[case::all_small(1, 1)]
+    #[tokio::test]
+    async fn test_reference_reader(#[case] chunk_size: usize, #[case] capacity: usize) {
+        let candidates = vec![
+            // these exist in the drv:
+            "33l4p0pn0mybmqzaxfkpppyh7vx1c74p",
+            "pf80kikyxr63wrw56k00i1kw6ba76qik",
+            "cp65c8nk29qq5cl1wyy5qyw103cwmax7",
+            // this doesn't:
+            "fn7zvafq26f0c8b17brs7s95s10ibfzs",
+        ];
+        let pattern = ReferencePattern::new(candidates.clone());
+        let mut mock = Builder::new();
+        for c in HELLO_DRV.as_bytes().chunks(chunk_size) {
+            mock.read(c);
+        }
+        let mock = mock.build();
+        let mut reader = ReferenceReader::with_capacity(capacity, pattern, mock);
+        let mut s = String::new();
+        reader.read_to_string(&mut s).await.unwrap();
+        assert_eq!(s, HELLO_DRV);
+
+        let result = reader.finalise();
+        assert_eq!(result.len(), 3);
+
+        for c in candidates[..3].iter() {
+            assert!(result.contains(c));
+        }
+    }
+
+    // FUTUREWORK: Test with large file
 }
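
To make the `ReferencePattern`/`ReferenceScanner` split concrete, here is a minimal sketch of the intended reuse (the candidates are hypothetical store path hashes): the Wu-Manber searcher is built once and shared through the `Arc`, and each build output gets its own cheap scanner.

    // Construct the (comparatively expensive) searcher once.
    let pattern = ReferencePattern::new(vec![
        "33l4p0pn0mybmqzaxfkpppyh7vx1c74p",
        "pf80kikyxr63wrw56k00i1kw6ba76qik",
    ]);

    for output in [&b"...33l4p0pn0mybmqzaxfkpppyh7vx1c74p..."[..], &b"nothing to see"[..]] {
        // Cloning only bumps the Arc refcount; no searcher is rebuilt.
        let mut scanner = ReferenceScanner::new(pattern.clone());
        scanner.scan(output);
        let matched = scanner.finalise(); // BTreeSet of the candidates found
        assert!(matched.len() <= 2);
    }

For streaming data, `ReferenceReader` wraps the same pattern around an `AsyncRead` and carries the trailing `longest_candidate() - 1` bytes of each buffer fill over to the next, so a reference straddling two reads is still detected.
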
diff --git a/tvix/eval/src/tests/tvix_tests/readDir/foo/.keep b/tvix/glue/src/tests/empty-file
index e69de29bb2d1..e69de29bb2d1 100644
--- a/tvix/eval/src/tests/tvix_tests/readDir/foo/.keep
+++ b/tvix/glue/src/tests/empty-file
diff --git a/tvix/glue/src/tests/mod.rs b/tvix/glue/src/tests/mod.rs
index 9fe0c222709a..f68ea5425899 100644
--- a/tvix/glue/src/tests/mod.rs
+++ b/tvix/glue/src/tests/mod.rs
@@ -1,10 +1,11 @@
 use std::{rc::Rc, sync::Arc};
 
+use clap::Parser;
 use pretty_assertions::assert_eq;
 use std::path::PathBuf;
 use tvix_build::buildservice::DummyBuildService;
 use tvix_eval::{EvalIO, Value};
-use tvix_store::utils::construct_services;
+use tvix_store::utils::{construct_services, ServiceUrlsMemory};
 
 use rstest::rstest;
 
@@ -35,28 +36,32 @@ fn eval_test(code_path: PathBuf, expect_success: bool) {
     let tokio_runtime = tokio::runtime::Runtime::new().unwrap();
     let (blob_service, directory_service, path_info_service, nar_calculation_service) =
         tokio_runtime
-            .block_on(async { construct_services("memory://", "memory://", "memory://").await })
+            .block_on(async {
+                construct_services(ServiceUrlsMemory::parse_from(std::iter::empty::<&str>())).await
+            })
             .unwrap();
 
     let tvix_store_io = Rc::new(TvixStoreIO::new(
         blob_service,
         directory_service,
-        path_info_service.into(),
+        path_info_service,
         nar_calculation_service.into(),
         Arc::new(DummyBuildService::default()),
         tokio_runtime.handle().clone(),
     ));
     // Wrap with TvixIO, so <nix/fetchurl.nix can be imported.
-    let mut eval = tvix_eval::Evaluation::new(
-        Box::new(TvixIO::new(tvix_store_io.clone() as Rc<dyn EvalIO>)) as Box<dyn EvalIO>,
-        true,
-    );
+    let mut eval_builder = tvix_eval::Evaluation::builder(Box::new(TvixIO::new(
+        tvix_store_io.clone() as Rc<dyn EvalIO>,
+    )) as Box<dyn EvalIO>)
+    .enable_import()
+    .strict();
+
+    eval_builder = add_derivation_builtins(eval_builder, Rc::clone(&tvix_store_io));
+    eval_builder = add_fetcher_builtins(eval_builder, Rc::clone(&tvix_store_io));
+    eval_builder = add_import_builtins(eval_builder, tvix_store_io);
+    eval_builder = configure_nix_path(eval_builder, &None);
 
-    eval.strict = true;
-    add_derivation_builtins(&mut eval, tvix_store_io.clone());
-    add_fetcher_builtins(&mut eval, tvix_store_io.clone());
-    add_import_builtins(&mut eval, tvix_store_io.clone());
-    configure_nix_path(&mut eval, &None);
+    let eval = eval_builder.build();
 
     let result = eval.evaluate(code, Some(code_path.clone()));
     let failed = match result.value {
@@ -145,3 +150,11 @@ fn nix_eval_okay(#[files("src/tests/nix_tests/eval-okay-*.nix")] code_path: Path
 // ) {
 //     eval_test(code_path, false)
 // }
+
+// eval-fail-* tests contain a snippet of Nix code, which is
+// expected to fail evaluation.  The exact type of failure
+// (assertion, parse error, etc) is not currently checked.
+#[rstest]
+fn eval_fail(#[files("src/tests/tvix_tests/eval-fail-*.nix")] code_path: PathBuf) {
+    eval_test(code_path, false)
+}
diff --git a/tvix/glue/src/tests/tvix_tests/eval-fail-fetchtarball-invalid-attrs.nix b/tvix/glue/src/tests/tvix_tests/eval-fail-fetchtarball-invalid-attrs.nix
new file mode 100644
index 000000000000..209f58cc9d0c
--- /dev/null
+++ b/tvix/glue/src/tests/tvix_tests/eval-fail-fetchtarball-invalid-attrs.nix
@@ -0,0 +1,5 @@
+(builtins.fetchTarball {
+  url = "https://test.example/owo";
+  # Only "sha256" is accepted here.
+  hash = "sha256-Xa1Jbl2Eq5+L0ww+Ph1osA3Z/Dxe/RkN1/dITQCdXFk=";
+})
diff --git a/tvix/glue/src/tests/tvix_tests/eval-fail-fetchtarball-invalid-url.nix b/tvix/glue/src/tests/tvix_tests/eval-fail-fetchtarball-invalid-url.nix
new file mode 100644
index 000000000000..32596ddcd5fb
--- /dev/null
+++ b/tvix/glue/src/tests/tvix_tests/eval-fail-fetchtarball-invalid-url.nix
@@ -0,0 +1 @@
+(builtins.fetchTarball /dev/null)
diff --git a/tvix/glue/src/tests/tvix_tests/eval-fail-fetchurl-invalid-attrs.nix b/tvix/glue/src/tests/tvix_tests/eval-fail-fetchurl-invalid-attrs.nix
new file mode 100644
index 000000000000..d3c2bed8018e
--- /dev/null
+++ b/tvix/glue/src/tests/tvix_tests/eval-fail-fetchurl-invalid-attrs.nix
@@ -0,0 +1,5 @@
+(builtins.fetchurl {
+  url = "https://test.example/owo";
+  # Only "sha256" is accepted here.
+  hash = "sha256-Xa1Jbl2Eq5+L0ww+Ph1osA3Z/Dxe/RkN1/dITQCdXFk=";
+})
diff --git a/tvix/glue/src/tests/tvix_tests/eval-fail-fetchurl-invalid-url.nix b/tvix/glue/src/tests/tvix_tests/eval-fail-fetchurl-invalid-url.nix
new file mode 100644
index 000000000000..dc3f70b998d3
--- /dev/null
+++ b/tvix/glue/src/tests/tvix_tests/eval-fail-fetchurl-invalid-url.nix
@@ -0,0 +1 @@
+(builtins.fetchurl /dev/null)
diff --git a/tvix/glue/src/tests/tvix_tests/eval-fail-tofile-wrongctxtype.nix b/tvix/glue/src/tests/tvix_tests/eval-fail-tofile-wrongctxtype.nix
new file mode 100644
index 000000000000..60c94818ed25
--- /dev/null
+++ b/tvix/glue/src/tests/tvix_tests/eval-fail-tofile-wrongctxtype.nix
@@ -0,0 +1,3 @@
+# in 'toFile': the file 'foo' cannot refer to derivation outputs, at (string):1:1
+builtins.toFile "foo" "${(builtins.derivation {name = "foo"; builder = ":"; system = ":";})}"
+
diff --git a/tvix/glue/src/tests/tvix_tests/eval-okay-context-introspection.nix b/tvix/glue/src/tests/tvix_tests/eval-okay-context-introspection.nix
index ecd8ab0073d0..e5719e00c3ae 100644
--- a/tvix/glue/src/tests/tvix_tests/eval-okay-context-introspection.nix
+++ b/tvix/glue/src/tests/tvix_tests/eval-okay-context-introspection.nix
@@ -71,7 +71,7 @@ in
   (builtins.hasAttr "allOutputs" (builtins.getContext drv.drvPath)."${builtins.unsafeDiscardStringContext drv.drvPath}")
   (legit-context == desired-context) # FIXME(raitobezarius): this should not use `builtins.seq`, this is a consequence of excessive laziness of Tvix, I believe.
   (reconstructed-path == combo-path)
-  # Those are too slow?
+  # These still fail with an internal error
   # (etaRule' "foo")
   # (etaRule' combo-path)
   (etaRule "foo")
diff --git a/tvix/glue/src/tests/tvix_tests/eval-okay-fetchurl.exp b/tvix/glue/src/tests/tvix_tests/eval-okay-fetchurl.exp
index 37a04d577c59..50d8e3574bb8 100644
--- a/tvix/glue/src/tests/tvix_tests/eval-okay-fetchurl.exp
+++ b/tvix/glue/src/tests/tvix_tests/eval-okay-fetchurl.exp
@@ -1 +1 @@
-[ /nix/store/y0r1p1cqmlvm0yqkz3gxvkc1p8kg2sz8-null /nix/store/06qi00hylriyfm0nl827crgjvbax84mz-notmuch-extract-patch /nix/store/06qi00hylriyfm0nl827crgjvbax84mz-notmuch-extract-patch /nix/store/06qi00hylriyfm0nl827crgjvbax84mz-notmuch-extract-patch ]
+[ /nix/store/y0r1p1cqmlvm0yqkz3gxvkc1p8kg2sz8-null /nix/store/06qi00hylriyfm0nl827crgjvbax84mz-notmuch-extract-patch /nix/store/06qi00hylriyfm0nl827crgjvbax84mz-notmuch-extract-patch /nix/store/06qi00hylriyfm0nl827crgjvbax84mz-notmuch-extract-patch "/nix/store/06qi00hylriyfm0nl827crgjvbax84mz-notmuch-extract-patch" "/nix/store/b40vjphshq4fdgv8s3yrp0bdlafi4920-0r8nqa1klm5v17ifc6z96m9wywxkjvgbnqq9pmy0sgqj53wj3n12.nar.xz" "/nix/store/8kx7fdkdbzs4fkfb57xq0cbhs20ymq2n-0r8nqa1klm5v17ifc6z96m9wywxkjvgbnqq9pmy0sgqj53wj3n12.nar.xz" "/nix/store/y92hm2xfk1009hrq0ix80j4m5k4j4w21-0r8nqa1klm5v17ifc6z96m9wywxkjvgbnqq9pmy0sgqj53wj3n12.nar.xz" ]
diff --git a/tvix/glue/src/tests/tvix_tests/eval-okay-fetchurl.nix b/tvix/glue/src/tests/tvix_tests/eval-okay-fetchurl.nix
index 8a3910152554..a3e9c5486968 100644
--- a/tvix/glue/src/tests/tvix_tests/eval-okay-fetchurl.nix
+++ b/tvix/glue/src/tests/tvix_tests/eval-okay-fetchurl.nix
@@ -22,4 +22,44 @@
     name = "notmuch-extract-patch";
     sha256 = "sha256-Xa1Jbl2Eq5+L0ww+Ph1osA3Z/Dxe/RkN1/dITQCdXFk=";
   })
+
+  # The following tests use <nix/fetchurl.nix>.
+  # This is a piece of Nix code producing a "fake derivation" which gets
+  # handled by a "custom builder" that does the actual fetching.
+  # We access `.outPath` here, as the current string output of a Derivation
+  # still differs from the way nix presents it.
+  # It behaves similarly to builtins.fetchurl, except it requires a hash to be
+  # provided upfront.
+  # If `unpack` is set to true, it can unpack NAR files (decompressing if
+  # necessary).
+  # If `executable` is set to true, it will place the fetched file at the root,
+  # but make it executable, and the hash is on the NAR representation.
+
+  # Fetch a URL.
+  (import <nix/fetchurl.nix> {
+    url = "https://test.example/owo";
+    name = "notmuch-extract-patch";
+    sha256 = "Xa1Jbl2Eq5+L0ww+Ph1osA3Z/Dxe/RkN1/dITQCdXFk=";
+  }).outPath
+
+  # Fetch a NAR and unpack it, specifying the sha256 of its NAR representation.
+  (import <nix/fetchurl.nix> {
+    url = "https://cache.nixos.org/nar/0r8nqa1klm5v17ifc6z96m9wywxkjvgbnqq9pmy0sgqj53wj3n12.nar.xz";
+    sha256 = "sha256-oj6yfWKbcEerK8D9GdPJtIAOveNcsH1ztGeSARGypRA=";
+    unpack = true;
+  }).outPath
+
+  # Fetch a NAR and unpack it, specifying the *sha1* of its NAR representation.
+  (import <nix/fetchurl.nix> {
+    url = "https://cache.nixos.org/nar/0r8nqa1klm5v17ifc6z96m9wywxkjvgbnqq9pmy0sgqj53wj3n12.nar.xz";
+    hash = "sha1-F/fMsgwkXF8fPCg1v9zPZ4yOFIA=";
+    unpack = true;
+  }).outPath
+
+  # Fetch a URL, specifying the *sha1* of a NAR describing it as executable at the root.
+  (import <nix/fetchurl.nix> {
+    url = "https://cache.nixos.org/nar/0r8nqa1klm5v17ifc6z96m9wywxkjvgbnqq9pmy0sgqj53wj3n12.nar.xz";
+    hash = "sha1-NKNeU1csW5YJ4lCeWH3Z/apppNU=";
+    executable = true;
+  }).outPath
 ]
diff --git a/tvix/glue/src/tests/tvix_tests/eval-okay-storePath.exp b/tvix/glue/src/tests/tvix_tests/eval-okay-storePath.exp
new file mode 100644
index 000000000000..e7d20f6631b1
--- /dev/null
+++ b/tvix/glue/src/tests/tvix_tests/eval-okay-storePath.exp
@@ -0,0 +1 @@
+{ contextMatches = true; hasContext = true; }
diff --git a/tvix/glue/src/tests/tvix_tests/eval-okay-storePath.nix b/tvix/glue/src/tests/tvix_tests/eval-okay-storePath.nix
new file mode 100644
index 000000000000..99205cb9e028
--- /dev/null
+++ b/tvix/glue/src/tests/tvix_tests/eval-okay-storePath.nix
@@ -0,0 +1,9 @@
+let
+  path = builtins.unsafeDiscardStringContext "${../empty-file}";
+  storePath = builtins.storePath path;
+  context = builtins.getContext storePath;
+in
+{
+  hasContext = builtins.hasContext storePath;
+  contextMatches = context == { "${path}" = { path = true; }; };
+}
diff --git a/tvix/glue/src/tests/tvix_tests/eval-okay-tofile.exp b/tvix/glue/src/tests/tvix_tests/eval-okay-tofile.exp
new file mode 100644
index 000000000000..c8e5b8fab5d9
--- /dev/null
+++ b/tvix/glue/src/tests/tvix_tests/eval-okay-tofile.exp
@@ -0,0 +1 @@
+[ "/nix/store/vxjiwkjkn7x4079qvh1jkl5pn05j2aw0-foo" "/nix/store/i7liwr52956m86kxp7dgbcwsk56r27v6-foo" "/nix/store/yw8k7ixk1zvb113p4y0bl3ahjxd5h9sr-foo" { "/nix/store/yw8k7ixk1zvb113p4y0bl3ahjxd5h9sr-foo" = { path = true; }; } ]
diff --git a/tvix/glue/src/tests/tvix_tests/eval-okay-tofile.nix b/tvix/glue/src/tests/tvix_tests/eval-okay-tofile.nix
new file mode 100644
index 000000000000..141bbc38ec11
--- /dev/null
+++ b/tvix/glue/src/tests/tvix_tests/eval-okay-tofile.nix
@@ -0,0 +1,11 @@
+let
+  noContext = (builtins.toFile "foo" "bar");
+  someContext = (builtins.toFile "foo" "bar${noContext}");
+  moreContext = (builtins.toFile "foo" "bar${someContext}");
+in
+[
+  noContext
+  someContext
+  moreContext
+  (builtins.getContext moreContext)
+]
diff --git a/tvix/glue/src/tests/tvix_tests/eval-okay-toxml-context.exp b/tvix/glue/src/tests/tvix_tests/eval-okay-toxml-context.exp
new file mode 100644
index 000000000000..e9600ecdad7a
--- /dev/null
+++ b/tvix/glue/src/tests/tvix_tests/eval-okay-toxml-context.exp
@@ -0,0 +1 @@
+[ { "/nix/store/y1s2fiq89v2h9vkb38w508ir20dwv6v2-test.drv" = { allOutputs = true; }; } false ]
diff --git a/tvix/glue/src/tests/tvix_tests/eval-okay-toxml-context.nix b/tvix/glue/src/tests/tvix_tests/eval-okay-toxml-context.nix
new file mode 100644
index 000000000000..933aa46022dd
--- /dev/null
+++ b/tvix/glue/src/tests/tvix_tests/eval-okay-toxml-context.nix
@@ -0,0 +1,14 @@
+[
+  # builtins.toXML retains context where there is.
+  (builtins.getContext (builtins.toXML {
+    inherit (derivation {
+      name = "test";
+      builder = "/bin/sh";
+      system = builtins.currentSystem;
+    }) drvPath;
+  }))
+
+  # this should have no context.
+  (builtins.hasContext
+    (builtins.toXML { }))
+]
diff --git a/tvix/glue/src/tvix_build.rs b/tvix/glue/src/tvix_build.rs
index e9eb1725ef3e..ae01351503f2 100644
--- a/tvix/glue/src/tvix_build.rs
+++ b/tvix/glue/src/tvix_build.rs
@@ -1,7 +1,7 @@
 //! This module contains glue code translating from
 //! [nix_compat::derivation::Derivation] to [tvix_build::proto::BuildRequest].
 
-use std::collections::{BTreeMap, BTreeSet};
+use std::collections::BTreeMap;
 
 use bytes::Bytes;
 use nix_compat::{derivation::Derivation, nixbase32};
@@ -10,7 +10,7 @@ use tvix_build::proto::{
     build_request::{AdditionalFile, BuildConstraints, EnvVar},
     BuildRequest,
 };
-use tvix_castore::proto::{self, node::Node};
+use tvix_castore::Node;
 
 /// These are the environment variables that Nix sets in its sandbox for every
 /// build.
@@ -36,10 +36,9 @@ const NIX_ENVIRONMENT_VARS: [(&str, &str); 12] = [
 ///   (`fn_input_sources_to_node`)
 /// - one translating a tuple of drv path and (a subset of their) output names to
 ///   castore nodes of the selected outpus (`fn_input_drvs_to_output_nodes`).
-#[allow(clippy::mutable_key_type)]
 pub(crate) fn derivation_to_build_request(
     derivation: &Derivation,
-    inputs: BTreeSet<Node>,
+    inputs: BTreeMap<bytes::Bytes, Node>,
 ) -> std::io::Result<BuildRequest> {
     debug_assert!(derivation.validate(true).is_ok(), "drv must validate");
 
@@ -111,7 +110,7 @@ pub(crate) fn derivation_to_build_request(
             .collect(),
         inputs: inputs
             .into_iter()
-            .map(|n| proto::Node { node: Some(n) })
+            .map(|(name, node)| tvix_castore::proto::Node::from_name_and_node(name, node))
             .collect(),
         inputs_dir: nix_compat::store_path::STORE_DIR[1..].into(),
         constraints,
@@ -192,18 +191,15 @@ fn calculate_pass_as_file_env(k: &str) -> (String, String) {
 
 #[cfg(test)]
 mod test {
-    use std::collections::BTreeSet;
-
     use bytes::Bytes;
     use nix_compat::derivation::Derivation;
+    use std::collections::BTreeMap;
     use tvix_build::proto::{
         build_request::{AdditionalFile, BuildConstraints, EnvVar},
         BuildRequest,
     };
-    use tvix_castore::{
-        fixtures::DUMMY_DIGEST,
-        proto::{self, node::Node, DirectoryNode},
-    };
+    use tvix_castore::fixtures::DUMMY_DIGEST;
+    use tvix_castore::Node;
 
     use crate::tvix_build::NIX_ENVIRONMENT_VARS;
 
@@ -211,11 +207,11 @@ mod test {
     use lazy_static::lazy_static;
 
     lazy_static! {
-        static ref INPUT_NODE_FOO: Node = Node::Directory(DirectoryNode {
-            name: Bytes::from("mp57d33657rf34lzvlbpfa1gjfv5gmpg-bar"),
-            digest: DUMMY_DIGEST.clone().into(),
-            size: 42,
-        });
+        static ref INPUT_NODE_FOO_NAME: Bytes = "mp57d33657rf34lzvlbpfa1gjfv5gmpg-bar".into();
+        static ref INPUT_NODE_FOO: Node = Node::Directory {
+            digest: DUMMY_DIGEST.clone(),
+            size: 42
+        };
     }
 
     #[test]
@@ -224,9 +220,11 @@ mod test {
 
         let derivation = Derivation::from_aterm_bytes(aterm_bytes).expect("must parse");
 
-        let build_request =
-            derivation_to_build_request(&derivation, BTreeSet::from([INPUT_NODE_FOO.clone()]))
-                .expect("must succeed");
+        let build_request = derivation_to_build_request(
+            &derivation,
+            BTreeMap::from([(INPUT_NODE_FOO_NAME.clone(), INPUT_NODE_FOO.clone())]),
+        )
+        .expect("must succeed");
 
         let mut expected_environment_vars = vec![
             EnvVar {
@@ -263,9 +261,10 @@ mod test {
                 command_args: vec![":".into()],
                 outputs: vec!["nix/store/fhaj6gmwns62s6ypkcldbaj2ybvkhx3p-foo".into()],
                 environment_vars: expected_environment_vars,
-                inputs: vec![proto::Node {
-                    node: Some(INPUT_NODE_FOO.clone())
-                }],
+                inputs: vec![tvix_castore::proto::Node::from_name_and_node(
+                    INPUT_NODE_FOO_NAME.clone(),
+                    INPUT_NODE_FOO.clone()
+                )],
                 inputs_dir: "nix/store".into(),
                 constraints: Some(BuildConstraints {
                     system: derivation.system.clone(),
@@ -289,7 +288,7 @@ mod test {
         let derivation = Derivation::from_aterm_bytes(aterm_bytes).expect("must parse");
 
         let build_request =
-            derivation_to_build_request(&derivation, BTreeSet::from([])).expect("must succeed");
+            derivation_to_build_request(&derivation, BTreeMap::from([])).expect("must succeed");
 
         let mut expected_environment_vars = vec![
             EnvVar {
@@ -359,7 +358,7 @@ mod test {
         let derivation = Derivation::from_aterm_bytes(aterm_bytes).expect("must parse");
 
         let build_request =
-            derivation_to_build_request(&derivation, BTreeSet::from([])).expect("must succeed");
+            derivation_to_build_request(&derivation, BTreeMap::from([])).expect("must succeed");
 
         let mut expected_environment_vars = vec![
             // Note how bar and baz are not present in the env anymore,
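
With inputs keyed by store-path basename, assembling a build request looks roughly like this sketch (reusing the fixture shapes from the test module above; `derivation` is assumed parsed from ATerm as in the tests):

    use bytes::Bytes;
    use std::collections::BTreeMap;
    use tvix_castore::{fixtures::DUMMY_DIGEST, Node};

    // The map key carries the basename; the Node itself is now anonymous.
    let inputs: BTreeMap<Bytes, Node> = BTreeMap::from([(
        Bytes::from("mp57d33657rf34lzvlbpfa1gjfv5gmpg-bar"),
        Node::Directory {
            digest: DUMMY_DIGEST.clone(),
            size: 42,
        },
    )]);

    let build_request = derivation_to_build_request(&derivation, inputs).expect("must succeed");
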
diff --git a/tvix/glue/src/tvix_io.rs b/tvix/glue/src/tvix_io.rs
index 0e5f23b99093..323fa8d20ccb 100644
--- a/tvix/glue/src/tvix_io.rs
+++ b/tvix/glue/src/tvix_io.rs
@@ -1,12 +1,10 @@
 //! This module implements a wrapper around tvix-eval's [EvalIO] type,
 //! adding functionality which is required by tvix-cli:
 //!
-//! 1. Marking plain paths known to the reference scanner.
-//! 2. Handling the C++ Nix `__corepkgs__`-hack for nixpkgs bootstrapping.
+//! 1. Handling the C++ Nix `__corepkgs__`-hack for nixpkgs bootstrapping.
 //!
 //! All uses of [EvalIO] in tvix-cli must make use of this wrapper,
-//! otherwise fundamental features like nixpkgs bootstrapping and hash
-//! calculation will not work.
+//! otherwise nixpkgs bootstrapping will not work.
 
 use std::io::{self, Cursor};
 use std::path::{Path, PathBuf};
@@ -60,6 +58,10 @@ where
         self.actual.as_ref().open(path)
     }
 
+    fn file_type(&self, path: &Path) -> io::Result<FileType> {
+        self.actual.as_ref().file_type(path)
+    }
+
     fn read_dir(&self, path: &Path) -> io::Result<Vec<(bytes::Bytes, FileType)>> {
         self.actual.as_ref().read_dir(path)
     }
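
A minimal sketch of the wrapper in use (`StdIO` standing in for any inner `EvalIO` implementation): everything is delegated to the inner handle, except the `/__corepkgs__` special-casing described in the module docs.

    use tvix_eval::{EvalIO, StdIO};

    // Wrap an EvalIO so <nix/...> imports resolve into /__corepkgs__.
    let io = TvixIO::new(Box::new(StdIO) as Box<dyn EvalIO>);
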
diff --git a/tvix/glue/src/tvix_store_io.rs b/tvix/glue/src/tvix_store_io.rs
index 7b8ef3ff0ac2..93675b438f25 100644
--- a/tvix/glue/src/tvix_store_io.rs
+++ b/tvix/glue/src/tvix_store_io.rs
@@ -1,31 +1,27 @@
 //! This module provides an implementation of EvalIO talking to tvix-store.
-
 use bytes::Bytes;
 use futures::{StreamExt, TryStreamExt};
 use nix_compat::nixhash::NixHash;
 use nix_compat::store_path::StorePathRef;
 use nix_compat::{nixhash::CAHash, store_path::StorePath};
-use sha2::{Digest, Sha256};
+use std::collections::BTreeMap;
 use std::{
     cell::RefCell,
-    collections::BTreeSet,
     io,
     path::{Path, PathBuf},
     sync::Arc,
 };
 use tokio_util::io::SyncIoBridge;
-use tracing::{error, info, instrument, warn, Level};
+use tracing::{error, instrument, warn, Level, Span};
+use tracing_indicatif::span_ext::IndicatifSpanExt;
 use tvix_build::buildservice::BuildService;
-use tvix_castore::proto::node::Node;
 use tvix_eval::{EvalIO, FileType, StdIO};
 use tvix_store::nar::NarCalculationService;
-use tvix_store::utils::AsyncIoBridge;
 
 use tvix_castore::{
     blobservice::BlobService,
     directoryservice::{self, DirectoryService},
-    proto::NamedNode,
-    B3Digest,
+    Node,
 };
 use tvix_store::{pathinfoservice::PathInfoService, proto::PathInfo};
 
@@ -69,7 +65,7 @@ pub struct TvixStoreIO {
     >,
 
     // Paths known how to produce, by building or fetching.
-    pub(crate) known_paths: RefCell<KnownPaths>,
+    pub known_paths: RefCell<KnownPaths>,
 }
 
 impl TvixStoreIO {
@@ -106,10 +102,10 @@ impl TvixStoreIO {
     ///
     /// In case there is no PathInfo yet, this means we need to build it
     /// (which currently is stubbed out still).
-    #[instrument(skip(self, store_path), fields(store_path=%store_path), ret(level = Level::TRACE), err)]
+    #[instrument(skip(self, store_path), fields(store_path=%store_path, indicatif.pb_show=1), ret(level = Level::TRACE), err)]
     async fn store_path_to_node(
         &self,
-        store_path: &StorePath,
+        store_path: &StorePath<String>,
         sub_path: &Path,
     ) -> io::Result<Option<Node>> {
         // Find the root node for the store_path.
@@ -124,7 +120,22 @@ impl TvixStoreIO {
             .await?
         {
             // if we have a PathInfo, we know there will be a root_node (due to validation)
-            Some(path_info) => path_info.node.expect("no node").node.expect("no node"),
+            // TODO: use stricter typed BuildRequest here.
+            Some(path_info) => {
+                let (name, node) = path_info
+                    .node
+                    .expect("no node")
+                    .into_name_and_node()
+                    .expect("invalid node");
+
+                assert_eq!(
+                    store_path.to_string().as_bytes(),
+                    name.as_ref(),
+                    "returned node basename must match requested store path"
+                );
+
+                node
+            }
             // If there's no PathInfo found, this normally means we have to
             // trigger the build (and insert into PathInfoService, after
             // reference scanning).
@@ -138,7 +149,8 @@ impl TvixStoreIO {
                 // The store path doesn't exist yet, so we need to fetch or build it.
                 // We check for fetches first, as we might have both native
                 // fetchers and FODs in KnownPaths, and prefer the former.
-
+                // This will also find [Fetch] synthesized from
+                // `builtin:fetchurl` Derivations.
                 let maybe_fetch = self
                     .known_paths
                     .borrow()
@@ -146,7 +158,6 @@ impl TvixStoreIO {
 
                 match maybe_fetch {
                     Some((name, fetch)) => {
-                        info!(?fetch, "triggering lazy fetch");
                         let (sp, root_node) = self
                             .fetcher
                             .ingest_and_persist(&name, fetch)
@@ -156,9 +167,9 @@ impl TvixStoreIO {
                         })?;
 
                         debug_assert_eq!(
-                            sp.to_string(),
-                            store_path.to_string(),
-                            "store path returned from fetcher should match"
+                            sp.to_absolute_path(),
+                            store_path.as_ref().to_absolute_path(),
+                            "store path returned from fetcher must match store path we have in fetchers"
                         );
 
                         root_node
@@ -179,14 +190,15 @@ impl TvixStoreIO {
                                 }
                             }
                         };
-
-                        warn!("triggering build");
+                        let span = Span::current();
+                        span.pb_start();
+                        span.pb_set_style(&tvix_tracing::PB_SPINNER_STYLE);
+                        span.pb_set_message(&format!("⏳Waiting for inputs {}", &store_path));
 
                         // derivation_to_build_request needs castore nodes for all inputs.
                         // Provide them, which means, here is where we recursively build
                         // all dependencies.
-                        #[allow(clippy::mutable_key_type)]
-                        let input_nodes: BTreeSet<Node> =
+                        let mut inputs: BTreeMap<Bytes, Node> =
                             futures::stream::iter(drv.input_derivations.iter())
                                 .map(|(input_drv_path, output_names)| {
                                     // look up the derivation object
@@ -201,7 +213,7 @@ impl TvixStoreIO {
                                     };
 
                                     // convert output names to actual paths
-                                    let output_paths: Vec<StorePath> = output_names
+                                    let output_paths: Vec<StorePath<String>> = output_names
                                         .iter()
                                         .map(|output_name| {
                                             input_drv
@@ -214,6 +226,7 @@ impl TvixStoreIO {
                                                 .clone()
                                         })
                                         .collect();
+
                                     // For each output, ask for the castore node.
                                     // We're in a per-derivation context, so if they're
                                     // not built yet they'll all get built together.
@@ -228,7 +241,7 @@ impl TvixStoreIO {
                                                 .await?;
 
                                             if let Some(node) = node {
-                                                Ok(node)
+                                                Ok((output_path.to_string().into(), node))
                                             } else {
                                                 Err(io::Error::other("no node produced"))
                                             }
@@ -236,16 +249,44 @@ impl TvixStoreIO {
                                     )
                                 })
                                 .flatten()
-                                .buffer_unordered(10) // TODO: make configurable
+                                .buffer_unordered(
+                                    1, /* TODO: increase again once we prevent redundant fetches */
+                                ) // TODO: make configurable
                                 .try_collect()
                                 .await?;
 
+                        // FUTUREWORK: merge these two things together
+                        // add input sources
+                        let input_sources: BTreeMap<_, _> =
+                            futures::stream::iter(drv.input_sources.iter())
+                                .then(|input_source| {
+                                    Box::pin({
+                                        let input_source = input_source.clone();
+                                        async move {
+                                            let node = self
+                                                .store_path_to_node(&input_source, Path::new(""))
+                                                .await?;
+                                            if let Some(node) = node {
+                                                Ok((input_source.to_string().into(), node))
+                                            } else {
+                                                Err(io::Error::other("no node produced"))
+                                            }
+                                        }
+                                    })
+                                })
+                                .try_collect()
+                                .await?;
+
+                        inputs.extend(input_sources);
+
+                        span.pb_set_message(&format!("🔨Building {}", &store_path));
+
                         // TODO: check if input sources are sufficiently dealt with,
                         // I think yes, they must be imported into the store by other
                         // operations, so dealt with in the Some(…) match arm
 
                         // synthesize the build request.
-                        let build_request = derivation_to_build_request(&drv, input_nodes)?;
+                        let build_request = derivation_to_build_request(&drv, inputs)?;
 
                         // create a build
                         let build_result = self
@@ -255,23 +296,25 @@ impl TvixStoreIO {
                             .await
                             .map_err(|e| std::io::Error::new(io::ErrorKind::Other, e))?;
 
-                        // TODO: refscan?
+                        // TODO: refscan
 
                         // For each output, insert a PathInfo.
                         for output in &build_result.outputs {
-                            let root_node = output.node.as_ref().expect("invalid root node");
+                            let (output_name, output_node) =
+                                output.clone().into_name_and_node().expect("invalid node");
 
                             // calculate the nar representation
                             let (nar_size, nar_sha256) = self
                                 .nar_calculation_service
-                                .calculate_nar(root_node)
+                                .calculate_nar(&output_node)
                                 .await?;
 
                             // assemble the PathInfo to persist
                             let path_info = PathInfo {
-                                node: Some(tvix_castore::proto::Node {
-                                    node: Some(root_node.clone()),
-                                }),
+                                node: Some(tvix_castore::proto::Node::from_name_and_node(
+                                    output_name.into(),
+                                    output_node,
+                                )),
                                 references: vec![], // TODO: refscan
                                 narinfo: Some(tvix_store::proto::NarInfo {
                                     nar_size,
@@ -304,16 +347,17 @@ impl TvixStoreIO {
                         }
 
                         // find the output for the store path requested
+                        let s = store_path.to_string();
+
                         build_result
                             .outputs
                             .into_iter()
-                            .find(|output_node| {
-                                output_node.node.as_ref().expect("invalid node").get_name()
-                                    == store_path.to_string().as_bytes()
+                            .map(|e| e.into_name_and_node().expect("invalid node"))
+                            .find(|(output_name, _output_node)| {
+                                output_name.as_ref() == s.as_bytes()
                             })
                             .expect("build didn't produce the store path")
-                            .node
-                            .expect("invalid node")
+                            .1
                     }
                 }
             }
@@ -328,13 +372,13 @@ impl TvixStoreIO {
             .map_err(|e| std::io::Error::new(io::ErrorKind::Other, e))
     }
 
-    pub(crate) async fn node_to_path_info(
+    pub(crate) async fn node_to_path_info<'a>(
         &self,
-        name: &str,
+        name: &'a str,
         path: &Path,
-        ca: CAHash,
+        ca: &CAHash,
         root_node: Node,
-    ) -> io::Result<(PathInfo, NixHash, StorePath)> {
+    ) -> io::Result<(PathInfo, NixHash, StorePathRef<'a>)> {
         // Ask the PathInfoService for the NAR size and sha256
         // We always need it no matter what is the actual hash mode
         // because the path info construct a narinfo which *always*
@@ -347,7 +391,7 @@ impl TvixStoreIO {
 
         // Calculate the output path. This might still fail, as some names are illegal.
         let output_path =
-            nix_compat::store_path::build_ca_path(name, &ca, Vec::<String>::new(), false).map_err(
+            nix_compat::store_path::build_ca_path(name, ca, Vec::<String>::new(), false).map_err(
                 |_| {
                     std::io::Error::new(
                         std::io::ErrorKind::InvalidData,
@@ -356,53 +400,33 @@ impl TvixStoreIO {
                 },
             )?;
 
-        // assemble a new root_node with a name that is derived from the nar hash.
-        let root_node = root_node.rename(output_path.to_string().into_bytes().into());
-        tvix_store::import::log_node(&root_node, path);
+        tvix_store::import::log_node(name.as_bytes(), &root_node, path);
 
-        let path_info =
-            tvix_store::import::derive_nar_ca_path_info(nar_size, nar_sha256, Some(ca), root_node);
+        // construct a PathInfo
+        let path_info = tvix_store::import::derive_nar_ca_path_info(
+            nar_size,
+            nar_sha256,
+            Some(ca),
+            output_path.to_string().into(),
+            root_node,
+        );
 
-        Ok((
-            path_info,
-            NixHash::Sha256(nar_sha256),
-            output_path.to_owned(),
-        ))
+        Ok((path_info, NixHash::Sha256(nar_sha256), output_path))
     }
 
-    pub(crate) async fn register_node_in_path_info_service(
+    pub(crate) async fn register_node_in_path_info_service<'a>(
         &self,
-        name: &str,
+        name: &'a str,
         path: &Path,
-        ca: CAHash,
+        ca: &CAHash,
         root_node: Node,
-    ) -> io::Result<StorePath> {
+    ) -> io::Result<StorePathRef<'a>> {
         let (path_info, _, output_path) = self.node_to_path_info(name, path, ca, root_node).await?;
         let _path_info = self.path_info_service.as_ref().put(path_info).await?;
 
         Ok(output_path)
     }
 
-    /// Transforms a BLAKE-3 digest into a SHA256 digest
-    /// by re-hashing the whole file.
-    pub(crate) async fn blob_to_sha256_hash(&self, blob_digest: B3Digest) -> io::Result<[u8; 32]> {
-        let mut reader = self
-            .blob_service
-            .open_read(&blob_digest)
-            .await?
-            .ok_or_else(|| {
-                io::Error::new(
-                    io::ErrorKind::NotFound,
-                    format!("blob represented by digest: '{}' not found", blob_digest),
-                )
-            })?;
-        // It is fine to use `AsyncIoBridge` here because hashing is not actually I/O.
-        let mut hasher = AsyncIoBridge(Sha256::new());
-
-        tokio::io::copy(&mut reader, &mut hasher).await?;
-        Ok(hasher.0.finalize().into())
-    }
-
     pub async fn store_path_exists<'a>(&'a self, store_path: StorePathRef<'a>) -> io::Result<bool> {
         Ok(self
             .path_info_service
@@ -421,7 +445,7 @@ impl EvalIO for TvixStoreIO {
         {
             if self
                 .tokio_handle
-                .block_on(async { self.store_path_to_node(&store_path, &sub_path).await })?
+                .block_on(self.store_path_to_node(&store_path, sub_path))?
                 .is_some()
             {
                 Ok(true)
@@ -443,30 +467,18 @@ impl EvalIO for TvixStoreIO {
         {
             if let Some(node) = self
                 .tokio_handle
-                .block_on(async { self.store_path_to_node(&store_path, &sub_path).await })?
+                .block_on(async { self.store_path_to_node(&store_path, sub_path).await })?
             {
                 // depending on the node type, treat open differently
                 match node {
-                    Node::Directory(_) => {
+                    Node::Directory { .. } => {
                         // This would normally be a io::ErrorKind::IsADirectory (still unstable)
                         Err(io::Error::new(
                             io::ErrorKind::Unsupported,
                             format!("tried to open directory at {:?}", path),
                         ))
                     }
-                    Node::File(file_node) => {
-                        let digest: B3Digest =
-                            file_node.digest.clone().try_into().map_err(|_e| {
-                                error!(
-                                    file_node = ?file_node,
-                                    "invalid digest"
-                                );
-                                io::Error::new(
-                                    io::ErrorKind::InvalidData,
-                                    format!("invalid digest length in file node: {:?}", file_node),
-                                )
-                            })?;
-
+                    Node::File { digest, .. } => {
                         self.tokio_handle.block_on(async {
                             let resp = self.blob_service.as_ref().open_read(&digest).await?;
                             match resp {
@@ -488,7 +500,7 @@ impl EvalIO for TvixStoreIO {
                             }
                         })
                     }
-                    Node::Symlink(_symlink_node) => Err(io::Error::new(
+                    Node::Symlink { .. } => Err(io::Error::new(
                         io::ErrorKind::Unsupported,
                         "open for symlinks is unsupported",
                     ))?,
@@ -505,37 +517,49 @@ impl EvalIO for TvixStoreIO {
     }
 
     #[instrument(skip(self), ret(level = Level::TRACE), err)]
+    fn file_type(&self, path: &Path) -> io::Result<FileType> {
+        if let Ok((store_path, sub_path)) =
+            StorePath::from_absolute_path_full(&path.to_string_lossy())
+        {
+            if let Some(node) = self
+                .tokio_handle
+                .block_on(async { self.store_path_to_node(&store_path, sub_path).await })?
+            {
+                match node {
+                    Node::Directory { .. } => Ok(FileType::Directory),
+                    Node::File { .. } => Ok(FileType::Regular),
+                    Node::Symlink { .. } => Ok(FileType::Symlink),
+                }
+            } else {
+                self.std_io.file_type(path)
+            }
+        } else {
+            self.std_io.file_type(path)
+        }
+    }
+
+    #[instrument(skip(self), ret(level = Level::TRACE), err)]
     fn read_dir(&self, path: &Path) -> io::Result<Vec<(bytes::Bytes, FileType)>> {
         if let Ok((store_path, sub_path)) =
             StorePath::from_absolute_path_full(&path.to_string_lossy())
         {
             if let Some(node) = self
                 .tokio_handle
-                .block_on(async { self.store_path_to_node(&store_path, &sub_path).await })?
+                .block_on(async { self.store_path_to_node(&store_path, sub_path).await })?
             {
                 match node {
-                    Node::Directory(directory_node) => {
+                    Node::Directory { digest, .. } => {
                         // fetch the Directory itself.
-                        let digest: B3Digest =
-                            directory_node.digest.clone().try_into().map_err(|_e| {
-                                io::Error::new(
-                                    io::ErrorKind::InvalidData,
-                                    format!(
-                                        "invalid digest length in directory node: {:?}",
-                                        directory_node
-                                    ),
-                                )
-                            })?;
-
-                        if let Some(directory) = self.tokio_handle.block_on(async {
-                            self.directory_service.as_ref().get(&digest).await
+                        if let Some(directory) = self.tokio_handle.block_on({
+                            let digest = digest.clone();
+                            async move { self.directory_service.as_ref().get(&digest).await }
                         })? {
                             let mut children: Vec<(bytes::Bytes, FileType)> = Vec::new();
-                            for node in directory.nodes() {
+                            for (name, node) in directory.into_nodes() {
                                 children.push(match node {
-                                    Node::Directory(e) => (e.name, FileType::Directory),
-                                    Node::File(e) => (e.name, FileType::Regular),
-                                    Node::Symlink(e) => (e.name, FileType::Symlink),
+                                    Node::Directory { .. } => (name.into(), FileType::Directory),
+                                    Node::File { .. } => (name.clone().into(), FileType::Regular),
+                                    Node::Symlink { .. } => (name.into(), FileType::Symlink),
                                 })
                             }
                             Ok(children)
@@ -552,14 +576,14 @@ impl EvalIO for TvixStoreIO {
                             ))?
                         }
                     }
-                    Node::File(_file_node) => {
+                    Node::File { .. } => {
                         // This would normally be a io::ErrorKind::NotADirectory (still unstable)
                         Err(io::Error::new(
                             io::ErrorKind::Unsupported,
                             "tried to readdir path {:?}, which is a file",
                         ))?
                     }
-                    Node::Symlink(_symlink_node) => Err(io::Error::new(
+                    Node::Symlink { .. } => Err(io::Error::new(
                         io::ErrorKind::Unsupported,
                         "read_dir for symlinks is unsupported",
                     ))?,
@@ -600,10 +624,11 @@ mod tests {
     use std::{path::Path, rc::Rc, sync::Arc};
 
     use bstr::ByteSlice;
+    use clap::Parser;
     use tempfile::TempDir;
     use tvix_build::buildservice::DummyBuildService;
     use tvix_eval::{EvalIO, EvaluationResult};
-    use tvix_store::utils::construct_services;
+    use tvix_store::utils::{construct_services, ServiceUrlsMemory};
 
     use super::TvixStoreIO;
     use crate::builtins::{add_derivation_builtins, add_fetcher_builtins, add_import_builtins};
@@ -615,22 +640,27 @@ mod tests {
         let tokio_runtime = tokio::runtime::Runtime::new().unwrap();
         let (blob_service, directory_service, path_info_service, nar_calculation_service) =
             tokio_runtime
-                .block_on(async { construct_services("memory://", "memory://", "memory://").await })
+                .block_on(async {
+                    construct_services(ServiceUrlsMemory::parse_from(std::iter::empty::<&str>()))
+                        .await
+                })
                 .unwrap();
 
         let io = Rc::new(TvixStoreIO::new(
             blob_service,
             directory_service,
-            path_info_service.into(),
+            path_info_service,
             nar_calculation_service.into(),
             Arc::<DummyBuildService>::default(),
             tokio_runtime.handle().clone(),
         ));
-        let mut eval = tvix_eval::Evaluation::new(io.clone() as Rc<dyn EvalIO>, true);
 
-        add_derivation_builtins(&mut eval, io.clone());
-        add_fetcher_builtins(&mut eval, io.clone());
-        add_import_builtins(&mut eval, io);
+        let mut eval_builder =
+            tvix_eval::Evaluation::builder(io.clone() as Rc<dyn EvalIO>).enable_import();
+        eval_builder = add_derivation_builtins(eval_builder, Rc::clone(&io));
+        eval_builder = add_fetcher_builtins(eval_builder, Rc::clone(&io));
+        eval_builder = add_import_builtins(eval_builder, io);
+        let eval = eval_builder.build();
 
         // run the evaluation itself.
         eval.evaluate(str, None)
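
The recurring pattern in this file is the explicit name/node split that replaces the old proto-level `NamedNode` handling; as a sketch grounded in the calls above (fixture values reused from the tvix_build tests):

    use tvix_castore::{fixtures::DUMMY_DIGEST, Node};

    // Pair a bare Node with its basename for the wire representation...
    let wire = tvix_castore::proto::Node::from_name_and_node(
        "mp57d33657rf34lzvlbpfa1gjfv5gmpg-bar".into(),
        Node::Directory { digest: DUMMY_DIGEST.clone(), size: 42 },
    );

    // ...and split it back apart when consuming, validating the basename
    // against the requested store path as store_path_to_node does.
    let (name, node) = wire.into_name_and_node().expect("invalid node");
    assert_eq!(b"mp57d33657rf34lzvlbpfa1gjfv5gmpg-bar".as_slice(), name.as_ref());
    match node {
        Node::Directory { .. } => { /* FileType::Directory */ }
        Node::File { .. } => { /* FileType::Regular */ }
        Node::Symlink { .. } => { /* FileType::Symlink */ }
    }
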
diff --git a/tvix/nar-bridge-go/.gitignore b/tvix/nar-bridge-go/.gitignore
deleted file mode 100644
index d70e1f8120cc..000000000000
--- a/tvix/nar-bridge-go/.gitignore
+++ /dev/null
@@ -1,2 +0,0 @@
-/nar-bridge-http
-/nar-bridge-pathinfo
diff --git a/tvix/nar-bridge-go/README.md b/tvix/nar-bridge-go/README.md
deleted file mode 100644
index 81431daf38ff..000000000000
--- a/tvix/nar-bridge-go/README.md
+++ /dev/null
@@ -1,7 +0,0 @@
-# //tvix/nar-bridge-go
-
-This exposes a HTTP Binary cache interface (GET/HEAD/PUT requests) for a `tvix-
-store`.
-
-It can be used to configure a tvix-store as a substitutor for Nix, or to upload
-store paths from Nix via `nix copy` into a `tvix-store`.
diff --git a/tvix/nar-bridge-go/cmd/nar-bridge-http/main.go b/tvix/nar-bridge-go/cmd/nar-bridge-http/main.go
deleted file mode 100644
index cf2aaf4901b2..000000000000
--- a/tvix/nar-bridge-go/cmd/nar-bridge-http/main.go
+++ /dev/null
@@ -1,93 +0,0 @@
-package main
-
-import (
-	"context"
-	"os"
-	"os/signal"
-	"runtime/debug"
-	"time"
-
-	"github.com/alecthomas/kong"
-
-	"go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc"
-	"google.golang.org/grpc"
-	"google.golang.org/grpc/credentials/insecure"
-
-	castorev1pb "code.tvl.fyi/tvix/castore-go"
-	narBridgeHttp "code.tvl.fyi/tvix/nar-bridge-go/pkg/http"
-	storev1pb "code.tvl.fyi/tvix/store-go"
-	log "github.com/sirupsen/logrus"
-)
-
-// `help:"Expose a tvix-store gRPC Interface as HTTP NAR/NARinfo"`
-var cli struct {
-	LogLevel        string `enum:"trace,debug,info,warn,error,fatal,panic" help:"The log level to log with" default:"info"`
-	ListenAddr      string `name:"listen-addr" help:"The address this service listens on" type:"string" default:"[::]:9000"`                    //nolint:lll
-	EnableAccessLog bool   `name:"access-log" help:"Enable access logging" type:"bool" default:"true" negatable:""`                             //nolint:lll
-	StoreAddr       string `name:"store-addr" help:"The address to the tvix-store RPC interface this will connect to" default:"localhost:8000"` //nolint:lll
-	EnableOtlp      bool   `name:"otlp" help:"Enable OpenTelemetry for logs, spans, and metrics" default:"true"`                                //nolint:lll
-}
-
-func main() {
-	_ = kong.Parse(&cli)
-
-	logLevel, err := log.ParseLevel(cli.LogLevel)
-	if err != nil {
-		log.Panic("invalid log level")
-		return
-	}
-	log.SetLevel(logLevel)
-
-	ctx, stop := signal.NotifyContext(context.Background(), os.Interrupt)
-	defer stop()
-
-	if cli.EnableOtlp {
-		buildInfo, ok := debug.ReadBuildInfo()
-		if !ok {
-			log.Fatal("failed to read build info")
-		}
-
-		shutdown, err := setupOpenTelemetry(ctx, "nar-bridge-http", buildInfo.Main.Version)
-		if err != nil {
-			log.WithError(err).Fatal("failed to setup OpenTelemetry")
-		}
-		defer shutdown(context.Background())
-	}
-
-	// connect to tvix-store
-	log.Debugf("Dialing to %v", cli.StoreAddr)
-	conn, err := grpc.DialContext(ctx, cli.StoreAddr,
-		grpc.WithTransportCredentials(insecure.NewCredentials()),
-		grpc.WithStatsHandler(otelgrpc.NewClientHandler()),
-	)
-	if err != nil {
-		log.Fatalf("did not connect: %v", err)
-	}
-	defer conn.Close()
-
-	s := narBridgeHttp.New(
-		castorev1pb.NewDirectoryServiceClient(conn),
-		castorev1pb.NewBlobServiceClient(conn),
-		storev1pb.NewPathInfoServiceClient(conn),
-		cli.EnableAccessLog,
-		30,
-	)
-
-	log.Printf("Starting nar-bridge-http at %v", cli.ListenAddr)
-	go s.ListenAndServe(cli.ListenAddr)
-
-	// listen for the interrupt signal.
-	<-ctx.Done()
-
-	// Restore default behaviour on the interrupt signal
-	stop()
-	log.Info("Received signal, shutting down; press Ctrl+C again to force.")
-
-	timeoutCtx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
-	defer cancel()
-
-	if err := s.Shutdown(timeoutCtx); err != nil {
-		log.WithError(err).Warn("failed to shutdown")
-		os.Exit(1)
-	}
-}
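
(A minimal, self-contained sketch of the shutdown pattern the removed main() used: signal.NotifyContext cancels a context on SIGINT, and http.Server.Shutdown gets a bounded window to drain. The plain http.Server here is a hypothetical stand-in for the nar-bridge server wiring.)

package main

import (
	"context"
	"net/http"
	"os"
	"os/signal"
	"time"
)

func main() {
	// ctx is cancelled on the first SIGINT; stop() restores default signal
	// handling, so a second Ctrl+C terminates the process immediately.
	ctx, stop := signal.NotifyContext(context.Background(), os.Interrupt)
	defer stop()

	srv := &http.Server{Addr: ":9000"} // hypothetical stand-in server
	go func() { _ = srv.ListenAndServe() }()

	<-ctx.Done() // block until the interrupt arrives
	stop()

	// Give in-flight requests up to 30 seconds to finish, then give up.
	timeoutCtx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
	defer cancel()
	_ = srv.Shutdown(timeoutCtx)
}
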
diff --git a/tvix/nar-bridge-go/cmd/nar-bridge-http/otel.go b/tvix/nar-bridge-go/cmd/nar-bridge-http/otel.go
deleted file mode 100644
index c446c6ec1a14..000000000000
--- a/tvix/nar-bridge-go/cmd/nar-bridge-http/otel.go
+++ /dev/null
@@ -1,87 +0,0 @@
-package main
-
-import (
-	"context"
-	"errors"
-
-	"go.opentelemetry.io/otel"
-	"go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc"
-	"go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc"
-	"go.opentelemetry.io/otel/propagation"
-	"go.opentelemetry.io/otel/sdk/metric"
-	"go.opentelemetry.io/otel/sdk/resource"
-	"go.opentelemetry.io/otel/sdk/trace"
-	semconv "go.opentelemetry.io/otel/semconv/v1.24.0"
-)
-
-func setupOpenTelemetry(ctx context.Context, serviceName, serviceVersion string) (func(context.Context) error, error) {
-	var shutdownFuncs []func(context.Context) error
-	shutdown := func(ctx context.Context) error {
-		var err error
-		for _, fn := range shutdownFuncs {
-			err = errors.Join(err, fn(ctx))
-		}
-		shutdownFuncs = nil
-		return err
-	}
-
-	res, err := resource.Merge(
-		resource.Default(),
-		resource.NewWithAttributes(
-			semconv.SchemaURL,
-			semconv.ServiceName(serviceName),
-			semconv.ServiceVersion(serviceVersion),
-		),
-	)
-	if err != nil {
-		return nil, errors.Join(err, shutdown(ctx))
-	}
-
-	prop := propagation.NewCompositeTextMapPropagator(
-		propagation.TraceContext{},
-		propagation.Baggage{},
-	)
-	otel.SetTextMapPropagator(prop)
-
-	tracerProvider, err := newTraceProvider(ctx, res)
-	if err != nil {
-		return nil, errors.Join(err, shutdown(ctx))
-	}
-	shutdownFuncs = append(shutdownFuncs, tracerProvider.Shutdown)
-	otel.SetTracerProvider(tracerProvider)
-
-	meterProvider, err := newMeterProvider(ctx, res)
-	if err != nil {
-		return nil, errors.Join(err, shutdown(ctx))
-	}
-	shutdownFuncs = append(shutdownFuncs, meterProvider.Shutdown)
-	otel.SetMeterProvider(meterProvider)
-
-	return shutdown, nil
-}
-
-func newTraceProvider(ctx context.Context, res *resource.Resource) (*trace.TracerProvider, error) {
-	traceExporter, err := otlptracegrpc.New(ctx)
-	if err != nil {
-		return nil, err
-	}
-
-	traceProvider := trace.NewTracerProvider(
-		trace.WithBatcher(traceExporter),
-		trace.WithResource(res),
-	)
-	return traceProvider, nil
-}
-
-func newMeterProvider(ctx context.Context, res *resource.Resource) (*metric.MeterProvider, error) {
-	metricExporter, err := otlpmetricgrpc.New(ctx)
-	if err != nil {
-		return nil, err
-	}
-
-	meterProvider := metric.NewMeterProvider(
-		metric.WithResource(res),
-		metric.WithReader(metric.NewPeriodicReader(metricExporter)),
-	)
-	return meterProvider, nil
-}
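
(The removed otel.go registers each provider's Shutdown hook in a slice and joins their errors at teardown. A reduced sketch of that accumulate-then-join idiom, independent of OpenTelemetry:)

package main

import (
	"context"
	"errors"
	"fmt"
)

func main() {
	var shutdownFuncs []func(context.Context) error
	shutdown := func(ctx context.Context) error {
		var err error
		// Run every registered hook; errors.Join collects all failures
		// instead of stopping at the first one.
		for _, fn := range shutdownFuncs {
			err = errors.Join(err, fn(ctx))
		}
		shutdownFuncs = nil // a second shutdown() becomes a no-op
		return err
	}

	shutdownFuncs = append(shutdownFuncs,
		func(context.Context) error { return nil },
		func(context.Context) error { return errors.New("flush failed") },
	)

	fmt.Println(shutdown(context.Background())) // prints: flush failed
}
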
diff --git a/tvix/nar-bridge-go/default.nix b/tvix/nar-bridge-go/default.nix
deleted file mode 100644
index 303d9c504114..000000000000
--- a/tvix/nar-bridge-go/default.nix
+++ /dev/null
@@ -1,10 +0,0 @@
-# Build the nar-bridge-go binaries as a Go module.
-
-{ depot, pkgs, lib, ... }:
-
-pkgs.buildGoModule {
-  name = "nar-bridge-go";
-  src = depot.third_party.gitignoreSource ./.;
-
-  vendorHash = "sha256-7jugbC5sEGhppjiZgnoLP5A6kQSaHK9vE6cXVZBG22s=";
-}
diff --git a/tvix/nar-bridge-go/go.mod b/tvix/nar-bridge-go/go.mod
deleted file mode 100644
index 3aa0694ff7b1..000000000000
--- a/tvix/nar-bridge-go/go.mod
+++ /dev/null
@@ -1,54 +0,0 @@
-module code.tvl.fyi/tvix/nar-bridge-go
-
-require (
-	code.tvl.fyi/tvix/castore-go v0.0.0-20231105151352-990d6ba2175e
-	code.tvl.fyi/tvix/store-go v0.0.0-20231105203234-f2baad42494f
-	github.com/alecthomas/kong v0.7.1
-	github.com/go-chi/chi v1.5.4
-	github.com/go-chi/chi/v5 v5.0.7
-	github.com/google/go-cmp v0.6.0
-	github.com/multiformats/go-multihash v0.2.1
-	github.com/nix-community/go-nix v0.0.0-20231012070617-9b176785e54d
-	github.com/sirupsen/logrus v1.9.0
-	github.com/stretchr/testify v1.8.4
-	go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.47.0
-	go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.47.0
-	go.opentelemetry.io/otel v1.22.0
-	go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v0.45.0
-	go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.22.0
-	go.opentelemetry.io/otel/sdk v1.22.0
-	go.opentelemetry.io/otel/sdk/metric v1.22.0
-	golang.org/x/sync v0.4.0
-	google.golang.org/grpc v1.60.1
-	google.golang.org/protobuf v1.32.0
-	lukechampine.com/blake3 v1.2.1
-)
-
-require (
-	github.com/cenkalti/backoff/v4 v4.2.1 // indirect
-	github.com/davecgh/go-spew v1.1.1 // indirect
-	github.com/felixge/httpsnoop v1.0.4 // indirect
-	github.com/go-logr/logr v1.4.1 // indirect
-	github.com/go-logr/stdr v1.2.2 // indirect
-	github.com/golang/protobuf v1.5.3 // indirect
-	github.com/grpc-ecosystem/grpc-gateway/v2 v2.16.0 // indirect
-	github.com/klauspost/cpuid/v2 v2.2.5 // indirect
-	github.com/minio/sha256-simd v1.0.0 // indirect
-	github.com/mr-tron/base58 v1.2.0 // indirect
-	github.com/multiformats/go-varint v0.0.6 // indirect
-	github.com/pmezard/go-difflib v1.0.0 // indirect
-	github.com/spaolacci/murmur3 v1.1.0 // indirect
-	go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.22.0 // indirect
-	go.opentelemetry.io/otel/metric v1.22.0 // indirect
-	go.opentelemetry.io/otel/trace v1.22.0 // indirect
-	go.opentelemetry.io/proto/otlp v1.0.0 // indirect
-	golang.org/x/crypto v0.18.0 // indirect
-	golang.org/x/net v0.20.0 // indirect
-	golang.org/x/sys v0.16.0 // indirect
-	golang.org/x/text v0.14.0 // indirect
-	google.golang.org/genproto/googleapis/api v0.0.0-20231002182017-d307bd883b97 // indirect
-	google.golang.org/genproto/googleapis/rpc v0.0.0-20231030173426-d783a09b4405 // indirect
-	gopkg.in/yaml.v3 v3.0.1 // indirect
-)
-
-go 1.19
diff --git a/tvix/nar-bridge-go/go.sum b/tvix/nar-bridge-go/go.sum
deleted file mode 100644
index 39f77b906128..000000000000
--- a/tvix/nar-bridge-go/go.sum
+++ /dev/null
@@ -1,120 +0,0 @@
-cloud.google.com/go/compute v1.23.0 h1:tP41Zoavr8ptEqaW6j+LQOnyBBhO7OkOMAGrgLopTwY=
-cloud.google.com/go/compute/metadata v0.2.3 h1:mg4jlk7mCAj6xXp9UJ4fjI9VUI5rubuGBW5aJ7UnBMY=
-code.tvl.fyi/tvix/castore-go v0.0.0-20231105151352-990d6ba2175e h1:Nj+anfyEYeEdhnIo2BG/N1ZwQl1IvI7AH3TbNDLwUOA=
-code.tvl.fyi/tvix/castore-go v0.0.0-20231105151352-990d6ba2175e/go.mod h1:+vKbozsa04yy2TWh3kUVU568jaza3Hf0p1jAEoMoCwA=
-code.tvl.fyi/tvix/store-go v0.0.0-20231105203234-f2baad42494f h1:bN3K7oSu3IAHXqS3ETHUgpBPHF9+awKKBRLiM8/1tmI=
-code.tvl.fyi/tvix/store-go v0.0.0-20231105203234-f2baad42494f/go.mod h1:8jpfSC2rGi6VKaKOqqgmflPVSEpUawuRQFwQpQYCMiA=
-github.com/alecthomas/assert/v2 v2.1.0 h1:tbredtNcQnoSd3QBhQWI7QZ3XHOVkw1Moklp2ojoH/0=
-github.com/alecthomas/kong v0.7.1 h1:azoTh0IOfwlAX3qN9sHWTxACE2oV8Bg2gAwBsMwDQY4=
-github.com/alecthomas/kong v0.7.1/go.mod h1:n1iCIO2xS46oE8ZfYCNDqdR0b0wZNrXAIAqro/2132U=
-github.com/alecthomas/repr v0.1.0 h1:ENn2e1+J3k09gyj2shc0dHr/yjaWSHRlrJ4DPMevDqE=
-github.com/cenkalti/backoff/v4 v4.2.1 h1:y4OZtCnogmCPw98Zjyt5a6+QwPLGkiQsYW5oUqylYbM=
-github.com/cenkalti/backoff/v4 v4.2.1/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE=
-github.com/cncf/xds/go v0.0.0-20230607035331-e9ce68804cb4 h1:/inchEIKaYC1Akx+H+gqO04wryn5h75LSazbRlnya1k=
-github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
-github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
-github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
-github.com/envoyproxy/protoc-gen-validate v1.0.2 h1:QkIBuU5k+x7/QXPvPPnWXWlCdaBFApVqftFV6k087DA=
-github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg=
-github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U=
-github.com/go-chi/chi v1.5.4 h1:QHdzF2szwjqVV4wmByUnTcsbIg7UGaQ0tPF2t5GcAIs=
-github.com/go-chi/chi v1.5.4/go.mod h1:uaf8YgoFazUOkPBG7fxPftUylNumIev9awIWOENIuEg=
-github.com/go-chi/chi/v5 v5.0.7 h1:rDTPXLDHGATaeHvVlLcR4Qe0zftYethFucbjVQ1PxU8=
-github.com/go-chi/chi/v5 v5.0.7/go.mod h1:DslCQbL2OYiznFReuXYUmQ2hGd1aDpCnlMNITLSKoi8=
-github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A=
-github.com/go-logr/logr v1.4.1 h1:pKouT5E8xu9zeFC39JXRDukb6JFQPXM5p5I91188VAQ=
-github.com/go-logr/logr v1.4.1/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY=
-github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag=
-github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE=
-github.com/golang/glog v1.1.2 h1:DVjP2PbBOzHyzA+dn3WhHIq4NdVu3Q+pvivFICf/7fo=
-github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk=
-github.com/golang/protobuf v1.5.3 h1:KhyjKVUg7Usr/dYsdSqoFveMYd5ko72D+zANwlG1mmg=
-github.com/golang/protobuf v1.5.3/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY=
-github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
-github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
-github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
-github.com/grpc-ecosystem/grpc-gateway/v2 v2.16.0 h1:YBftPWNWd4WwGqtY2yeZL2ef8rHAxPBD8KFhJpmcqms=
-github.com/grpc-ecosystem/grpc-gateway/v2 v2.16.0/go.mod h1:YN5jB8ie0yfIUg6VvR9Kz84aCaG7AsGZnLjhHbUqwPg=
-github.com/hexops/gotextdiff v1.0.3 h1:gitA9+qJrrTCsiCl7+kh75nPqQt1cx4ZkudSTLoUqJM=
-github.com/klauspost/cpuid/v2 v2.0.4/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg=
-github.com/klauspost/cpuid/v2 v2.2.5 h1:0E5MSMDEoAulmXNFquVs//DdoomxaoTY1kUhbc/qbZg=
-github.com/klauspost/cpuid/v2 v2.2.5/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws=
-github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE=
-github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
-github.com/minio/sha256-simd v1.0.0 h1:v1ta+49hkWZyvaKwrQB8elexRqm6Y0aMLjCNsrYxo6g=
-github.com/minio/sha256-simd v1.0.0/go.mod h1:OuYzVNI5vcoYIAmbIvHPl3N3jUzVedXbKy5RFepssQM=
-github.com/mr-tron/base58 v1.2.0 h1:T/HDJBh4ZCPbU39/+c3rRvE0uKBQlU27+QI8LJ4t64o=
-github.com/mr-tron/base58 v1.2.0/go.mod h1:BinMc/sQntlIE1frQmRFPUoPA1Zkr8VRgBdjWI2mNwc=
-github.com/multiformats/go-multihash v0.2.1 h1:aem8ZT0VA2nCHHk7bPJ1BjUbHNciqZC/d16Vve9l108=
-github.com/multiformats/go-multihash v0.2.1/go.mod h1:WxoMcYG85AZVQUyRyo9s4wULvW5qrI9vb2Lt6evduFc=
-github.com/multiformats/go-varint v0.0.6 h1:gk85QWKxh3TazbLxED/NlDVv8+q+ReFJk7Y2W/KhfNY=
-github.com/multiformats/go-varint v0.0.6/go.mod h1:3Ls8CIEsrijN6+B7PbrXRPxHRPuXSrVKRY101jdMZYE=
-github.com/nix-community/go-nix v0.0.0-20231012070617-9b176785e54d h1:kwc1ivTuStqa3iBC2M/ojWPor88+YeIbZGeD2SlMYZ0=
-github.com/nix-community/go-nix v0.0.0-20231012070617-9b176785e54d/go.mod h1:4ZJah5sYrUSsWXIOJIsQ6iVOQyLO+ffhWXU3gblcO+E=
-github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
-github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
-github.com/rogpeppe/go-internal v1.11.0 h1:cWPaGQEPrBb5/AsnsZesgZZ9yb1OQ+GOISoDNXVBh4M=
-github.com/sirupsen/logrus v1.9.0 h1:trlNQbNUG3OdDrDil03MCb1H2o9nJ1x4/5LYw7byDE0=
-github.com/sirupsen/logrus v1.9.0/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ=
-github.com/spaolacci/murmur3 v1.1.0 h1:7c1g84S4BPRrfL5Xrdp6fOJ206sU9y293DDHaoy0bLI=
-github.com/spaolacci/murmur3 v1.1.0/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA=
-github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
-github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
-github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk=
-github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
-go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.47.0 h1:UNQQKPfTDe1J81ViolILjTKPr9WetKW6uei2hFgJmFs=
-go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.47.0/go.mod h1:r9vWsPS/3AQItv3OSlEJ/E4mbrhUbbw18meOjArPtKQ=
-go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.47.0 h1:sv9kVfal0MK0wBMCOGr+HeJm9v803BkJxGrk2au7j08=
-go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.47.0/go.mod h1:SK2UL73Zy1quvRPonmOmRDiWk1KBV3LyIeeIxcEApWw=
-go.opentelemetry.io/otel v1.22.0 h1:xS7Ku+7yTFvDfDraDIJVpw7XPyuHlB9MCiqqX5mcJ6Y=
-go.opentelemetry.io/otel v1.22.0/go.mod h1:eoV4iAi3Ea8LkAEI9+GFT44O6T/D0GWAVFyZVCC6pMI=
-go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v0.45.0 h1:tfil6di0PoNV7FZdsCS7A5izZoVVQ7AuXtyekbOpG/I=
-go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v0.45.0/go.mod h1:AKFZIEPOnqB00P63bTjOiah4ZTaRzl1TKwUWpZdYUHI=
-go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.22.0 h1:9M3+rhx7kZCIQQhQRYaZCdNu1V73tm4TvXs2ntl98C4=
-go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.22.0/go.mod h1:noq80iT8rrHP1SfybmPiRGc9dc5M8RPmGvtwo7Oo7tc=
-go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.22.0 h1:H2JFgRcGiyHg7H7bwcwaQJYrNFqCqrbTQ8K4p1OvDu8=
-go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.22.0/go.mod h1:WfCWp1bGoYK8MeULtI15MmQVczfR+bFkk0DF3h06QmQ=
-go.opentelemetry.io/otel/metric v1.22.0 h1:lypMQnGyJYeuYPhOM/bgjbFM6WE44W1/T45er4d8Hhg=
-go.opentelemetry.io/otel/metric v1.22.0/go.mod h1:evJGjVpZv0mQ5QBRJoBF64yMuOf4xCWdXjK8pzFvliY=
-go.opentelemetry.io/otel/sdk v1.22.0 h1:6coWHw9xw7EfClIC/+O31R8IY3/+EiRFHevmHafB2Gw=
-go.opentelemetry.io/otel/sdk v1.22.0/go.mod h1:iu7luyVGYovrRpe2fmj3CVKouQNdTOkxtLzPvPz1DOc=
-go.opentelemetry.io/otel/sdk/metric v1.22.0 h1:ARrRetm1HCVxq0cbnaZQlfwODYJHo3gFL8Z3tSmHBcI=
-go.opentelemetry.io/otel/sdk/metric v1.22.0/go.mod h1:KjQGeMIDlBNEOo6HvjhxIec1p/69/kULDcp4gr0oLQQ=
-go.opentelemetry.io/otel/trace v1.22.0 h1:Hg6pPujv0XG9QaVbGOBVHunyuLcCC3jN7WEhPx83XD0=
-go.opentelemetry.io/otel/trace v1.22.0/go.mod h1:RbbHXVqKES9QhzZq/fE5UnOSILqRt40a21sPw2He1xo=
-go.opentelemetry.io/proto/otlp v1.0.0 h1:T0TX0tmXU8a3CbNXzEKGeU5mIVOdf0oykP+u2lIVU/I=
-go.opentelemetry.io/proto/otlp v1.0.0/go.mod h1:Sy6pihPLfYHkr3NkUbEhGHFhINUSI/v80hjKIs5JXpM=
-go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto=
-golang.org/x/crypto v0.18.0 h1:PGVlW0xEltQnzFZ55hkuX5+KLyrMYhHld1YHO4AKcdc=
-golang.org/x/crypto v0.18.0/go.mod h1:R0j02AL6hcrfOiy9T4ZYp/rcWeMxM3L6QYxlOuEG1mg=
-golang.org/x/net v0.20.0 h1:aCL9BSgETF1k+blQaYUBx9hJ9LOGP3gAVemcZlf1Kpo=
-golang.org/x/net v0.20.0/go.mod h1:z8BVo6PvndSri0LbOE3hAn0apkU+1YvI6E70E9jsnvY=
-golang.org/x/oauth2 v0.13.0 h1:jDDenyj+WgFtmV3zYVoi8aE2BwtXFLWOA67ZfNWftiY=
-golang.org/x/sync v0.4.0 h1:zxkM55ReGkDlKSM+Fu41A+zmbZuaPVbGMzvvdUPznYQ=
-golang.org/x/sync v0.4.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y=
-golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.16.0 h1:xWw16ngr6ZMtmxDyKyIgsE93KNKz5HKmMa3b8ALHidU=
-golang.org/x/sys v0.16.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
-golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ=
-golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
-golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
-google.golang.org/appengine v1.6.8 h1:IhEN5q69dyKagZPYMSdIjS2HqprW324FRQZJcGqPAsM=
-google.golang.org/genproto v0.0.0-20231016165738-49dd2c1f3d0b h1:+YaDE2r2OG8t/z5qmsh7Y+XXwCbvadxxZ0YY6mTdrVA=
-google.golang.org/genproto/googleapis/api v0.0.0-20231002182017-d307bd883b97 h1:W18sezcAYs+3tDZX4F80yctqa12jcP1PUS2gQu1zTPU=
-google.golang.org/genproto/googleapis/api v0.0.0-20231002182017-d307bd883b97/go.mod h1:iargEX0SFPm3xcfMI0d1domjg0ZF4Aa0p2awqyxhvF0=
-google.golang.org/genproto/googleapis/rpc v0.0.0-20231030173426-d783a09b4405 h1:AB/lmRny7e2pLhFEYIbl5qkDAUt2h0ZRO4wGPhZf+ik=
-google.golang.org/genproto/googleapis/rpc v0.0.0-20231030173426-d783a09b4405/go.mod h1:67X1fPuzjcrkymZzZV1vvkFeTn2Rvc6lYF9MYFGCcwE=
-google.golang.org/grpc v1.60.1 h1:26+wFr+cNqSGFcOXcabYC0lUVJVRa2Sb2ortSK7VrEU=
-google.golang.org/grpc v1.60.1/go.mod h1:OlCHIeLYqSSsLi6i49B5QGdzaMZK9+M7LXN2FKz4eGM=
-google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw=
-google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
-google.golang.org/protobuf v1.32.0 h1:pPC6BG5ex8PDFnkbrGU3EixyhKcQ2aDuBS36lqK/C7I=
-google.golang.org/protobuf v1.32.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos=
-gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
-gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
-gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
-gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
-gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
-lukechampine.com/blake3 v1.2.1 h1:YuqqRuaqsGV71BV/nm9xlI0MKUv4QC54jQnBChWbGnI=
-lukechampine.com/blake3 v1.2.1/go.mod h1:0OFRp7fBtAylGVCO40o87sbupkyIGgbpv1+M1k1LM6k=
diff --git a/tvix/nar-bridge-go/pkg/http/nar_get.go b/tvix/nar-bridge-go/pkg/http/nar_get.go
deleted file mode 100644
index 75797f8da90e..000000000000
--- a/tvix/nar-bridge-go/pkg/http/nar_get.go
+++ /dev/null
@@ -1,197 +0,0 @@
-package http
-
-import (
-	"bytes"
-	"context"
-	"encoding/base64"
-	"encoding/hex"
-	"errors"
-	"fmt"
-	"io"
-	"io/fs"
-	"net/http"
-	"sync"
-
-	castorev1pb "code.tvl.fyi/tvix/castore-go"
-	storev1pb "code.tvl.fyi/tvix/store-go"
-	"github.com/go-chi/chi/v5"
-	nixhash "github.com/nix-community/go-nix/pkg/hash"
-	"github.com/nix-community/go-nix/pkg/nixbase32"
-	log "github.com/sirupsen/logrus"
-)
-
-const (
-	narUrl = "/nar/{narhash:^([" + nixbase32.Alphabet + "]{52})$}.nar"
-)
-
-func renderNar(
-	ctx context.Context,
-	log *log.Entry,
-	directoryServiceClient castorev1pb.DirectoryServiceClient,
-	blobServiceClient castorev1pb.BlobServiceClient,
-	narHashDbMu *sync.Mutex,
-	narHashDb map[string]*narData,
-	w io.Writer,
-	narHash *nixhash.Hash,
-	headOnly bool,
-) error {
-	// look in the lookup table
-	narHashDbMu.Lock()
-	narData, found := narHashDb[narHash.SRIString()]
-	narHashDbMu.Unlock()
-
-	// if we didn't find anything, return 404.
-	if !found {
-		return fmt.Errorf("narHash not found: %w", fs.ErrNotExist)
-	}
-
-	rootNode := narData.rootNode
-
-	// if this was only a head request, we're done.
-	if headOnly {
-		return nil
-	}
-
-	directories := make(map[string]*castorev1pb.Directory)
-
-	// If the root node is a directory, ask the directory service for all directories
-	if pathInfoDirectory := rootNode.GetDirectory(); pathInfoDirectory != nil {
-		rootDirectoryDigest := pathInfoDirectory.GetDigest()
-		log = log.WithField("root_directory", base64.StdEncoding.EncodeToString(rootDirectoryDigest))
-
-		directoryStream, err := directoryServiceClient.Get(ctx, &castorev1pb.GetDirectoryRequest{
-			ByWhat: &castorev1pb.GetDirectoryRequest_Digest{
-				Digest: rootDirectoryDigest,
-			},
-			Recursive: true,
-		})
-		if err != nil {
-			return fmt.Errorf("unable to query directory stream: %w", err)
-		}
-
-		// For now, we just stream all of these locally and put them into a hashmap,
-		// which is used in the lookup function below.
-		for {
-			directory, err := directoryStream.Recv()
-			if err != nil {
-				if err == io.EOF {
-					break
-				}
-				return fmt.Errorf("unable to receive from directory stream: %w", err)
-			}
-
-			// calculate directory digest
-			// TODO: do we need to do any more validation?
-			directoryDgst, err := directory.Digest()
-			if err != nil {
-				return fmt.Errorf("unable to calculate directory digest: %w", err)
-			}
-
-			log.WithField("directory", base64.StdEncoding.EncodeToString(directoryDgst)).Debug("received directory node")
-
-			directories[hex.EncodeToString(directoryDgst)] = directory
-		}
-
-	}
-
-	// render the NAR file
-	err := storev1pb.Export(
-		w,
-		rootNode,
-		func(directoryDigest []byte) (*castorev1pb.Directory, error) {
-			log.WithField("directory", base64.StdEncoding.EncodeToString(directoryDigest)).Debug("Get directory")
-			directoryRefStr := hex.EncodeToString(directoryDigest)
-			directory, found := directories[directoryRefStr]
-			if !found {
-				return nil, fmt.Errorf(
-					"directory with hash %v does not exist: %w",
-					directoryDigest,
-					fs.ErrNotExist,
-				)
-			}
-
-			return directory, nil
-		},
-		func(blobDigest []byte) (io.ReadCloser, error) {
-			log.WithField("blob", base64.StdEncoding.EncodeToString(blobDigest)).Debug("Get blob")
-			resp, err := blobServiceClient.Read(ctx, &castorev1pb.ReadBlobRequest{
-				Digest: blobDigest,
-			})
-			if err != nil {
-				return nil, fmt.Errorf("unable to get blob: %w", err)
-			}
-
-			// set up a pipe, let a goroutine write, return the reader.
-			pR, pW := io.Pipe()
-
-			go func() {
-				for {
-					chunk, err := resp.Recv()
-					if errors.Is(err, io.EOF) {
-						break
-					}
-					if err != nil {
-						pW.CloseWithError(fmt.Errorf("receiving chunk: %w", err))
-						return
-					}
-
-					// write the received chunk to the writer part of the pipe
-					if _, err := io.Copy(pW, bytes.NewReader(chunk.GetData())); err != nil {
-						log.WithError(err).Error("writing chunk to pipe")
-						pW.CloseWithError(fmt.Errorf("writing chunk to pipe: %w", err))
-						return
-					}
-				}
-				pW.Close()
-
-			}()
-
-			return io.NopCloser(pR), nil
-		},
-	)
-	if err != nil {
-		return fmt.Errorf("unable to export nar: %w", err)
-	}
-	return nil
-}
-
-func registerNarGet(s *Server) {
-	// produce a handler for rendering NAR files.
-	genNarHandler := func(isHead bool) func(w http.ResponseWriter, r *http.Request) {
-		return func(w http.ResponseWriter, r *http.Request) {
-			defer r.Body.Close()
-
-			ctx := r.Context()
-
-			// parse the narhash sent in the request URL
-			narHash, err := parseNarHashFromUrl(chi.URLParamFromCtx(ctx, "narhash"))
-			if err != nil {
-				log.WithError(err).WithField("url", r.URL).Error("unable to decode nar hash from url")
-				w.WriteHeader(http.StatusBadRequest)
-				_, err := w.Write([]byte("unable to decode nar hash from url"))
-				if err != nil {
-					log.WithError(err).Errorf("unable to write error message to client")
-				}
-
-				return
-			}
-
-			log := log.WithField("narhash_url", narHash.SRIString())
-
-			// TODO: inline more of that function here?
-			err = renderNar(ctx, log, s.directoryServiceClient, s.blobServiceClient, &s.narDbMu, s.narDb, w, narHash, isHead)
-			if err != nil {
-				if errors.Is(err, fs.ErrNotExist) {
-					w.WriteHeader(http.StatusNotFound)
-				} else {
-					log.WithError(err).Warn("unable to render nar")
-					w.WriteHeader(http.StatusInternalServerError)
-				}
-			}
-
-		}
-	}
-
-	s.handler.Head(narUrl, genNarHandler(true))
-	s.handler.Get(narUrl, genNarHandler(false))
-}
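
(The blob callback above bridges a pull-style gRPC chunk stream into an io.ReadCloser with io.Pipe. A standalone sketch of the same bridge; the recv function is a hypothetical stand-in for the BlobService read stream:)

package main

import (
	"errors"
	"fmt"
	"io"
)

// chunkReader turns a pull-style chunk source (recv returns io.EOF once
// drained) into an io.ReadCloser, mirroring the goroutine in renderNar.
func chunkReader(recv func() ([]byte, error)) io.ReadCloser {
	pR, pW := io.Pipe()
	go func() {
		for {
			chunk, err := recv()
			if errors.Is(err, io.EOF) {
				pW.Close() // propagate EOF to the reader side
				return
			}
			if err != nil {
				pW.CloseWithError(fmt.Errorf("receiving chunk: %w", err))
				return
			}
			if _, err := pW.Write(chunk); err != nil {
				return // reader side went away
			}
		}
	}()
	return pR
}

func main() {
	chunks := [][]byte{[]byte("hello "), []byte("world")}
	r := chunkReader(func() ([]byte, error) {
		if len(chunks) == 0 {
			return nil, io.EOF
		}
		c := chunks[0]
		chunks = chunks[1:]
		return c, nil
	})
	b, _ := io.ReadAll(r)
	fmt.Println(string(b)) // hello world
}
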
diff --git a/tvix/nar-bridge-go/pkg/http/nar_put.go b/tvix/nar-bridge-go/pkg/http/nar_put.go
deleted file mode 100644
index 96bdd38b709d..000000000000
--- a/tvix/nar-bridge-go/pkg/http/nar_put.go
+++ /dev/null
@@ -1,141 +0,0 @@
-package http
-
-import (
-	"bufio"
-	"bytes"
-	"fmt"
-	"net/http"
-
-	castorev1pb "code.tvl.fyi/tvix/castore-go"
-	"code.tvl.fyi/tvix/nar-bridge-go/pkg/importer"
-	"github.com/go-chi/chi/v5"
-	mh "github.com/multiformats/go-multihash/core"
-	nixhash "github.com/nix-community/go-nix/pkg/hash"
-	"github.com/sirupsen/logrus"
-	log "github.com/sirupsen/logrus"
-)
-
-func registerNarPut(s *Server) {
-	s.handler.Put(narUrl, func(w http.ResponseWriter, r *http.Request) {
-		defer r.Body.Close()
-
-		ctx := r.Context()
-
-		// parse the narhash sent in the request URL
-		narHashFromUrl, err := parseNarHashFromUrl(chi.URLParamFromCtx(ctx, "narhash"))
-		if err != nil {
-			log.WithError(err).WithField("url", r.URL).Error("unable to decode nar hash from url")
-			w.WriteHeader(http.StatusBadRequest)
-			_, err := w.Write([]byte("unable to decode nar hash from url"))
-			if err != nil {
-				log.WithError(err).Error("unable to write error message to client")
-			}
-
-			return
-		}
-
-		log := log.WithField("narhash_url", narHashFromUrl.SRIString())
-
-		directoriesUploader := importer.NewDirectoriesUploader(ctx, s.directoryServiceClient)
-		defer directoriesUploader.Done() //nolint:errcheck
-
-		rootNode, narSize, narSha256, err := importer.Import(
-			ctx,
-			// buffer the body by 10MiB
-			bufio.NewReaderSize(r.Body, 10*1024*1024),
-			importer.GenBlobUploaderCb(ctx, s.blobServiceClient),
-			func(directory *castorev1pb.Directory) ([]byte, error) {
-				return directoriesUploader.Put(directory)
-			},
-		)
-
-		if err != nil {
-			log.Errorf("error during NAR import: %v", err)
-			w.WriteHeader(http.StatusInternalServerError)
-			_, err := w.Write([]byte(fmt.Sprintf("error during NAR import: %v", err)))
-			if err != nil {
-				log.WithError(err).Errorf("unable to write error message to client")
-			}
-
-			return
-		}
-
-		log.Debug("closing the stream")
-
-		// Close the directories uploader
-		directoriesPutResponse, err := directoriesUploader.Done()
-		if err != nil {
-			log.WithError(err).Error("error during directory upload")
-			w.WriteHeader(http.StatusBadRequest)
-			_, err := w.Write([]byte("error during directory upload"))
-			if err != nil {
-				log.WithError(err).Errorf("unable to write error message to client")
-			}
-
-			return
-		}
-		// If we uploaded directories (so directoriesPutResponse doesn't return null),
-		// the RootDigest field in directoriesPutResponse should match the digest
-		// returned in the PathInfo struct returned by the `Import` call.
-		// This check ensures the server-side came up with the same root hash.
-
-		if directoriesPutResponse != nil {
-			rootDigestPathInfo := rootNode.GetDirectory().GetDigest()
-			rootDigestDirectoriesPutResponse := directoriesPutResponse.GetRootDigest()
-
-			log := log.WithFields(logrus.Fields{
-				"root_digest_pathinfo":             rootDigestPathInfo,
-				"root_digest_directories_put_resp": rootDigestDirectoriesPutResponse,
-			})
-			if !bytes.Equal(rootDigestPathInfo, rootDigestDirectoriesPutResponse) {
-				log.Errorf("returned root digest doesn't match what's calculated")
-
-				w.WriteHeader(http.StatusBadRequest)
-				_, err := w.Write([]byte("error in root digest calculation"))
-				if err != nil {
-					log.WithError(err).Error("unable to write error message to client")
-				}
-
-				return
-			}
-		}
-
-		// Compare the nar hash specified in the URL with the one that has been
-		// calculated while processing the NAR file.
-		narHash, err := nixhash.FromHashTypeAndDigest(mh.SHA2_256, narSha256)
-		if err != nil {
-			panic("must parse nixbase32")
-		}
-
-		if !bytes.Equal(narHashFromUrl.Digest(), narHash.Digest()) {
-			log := log.WithFields(logrus.Fields{
-				"narhash_received_sha256": narHash.SRIString(),
-				"narsize":                 narSize,
-			})
-			log.Error("received bytes don't match narhash from URL")
-
-			w.WriteHeader(http.StatusBadRequest)
-			_, err := w.Write([]byte("received bytes don't match narHash specified in URL"))
-			if err != nil {
-				log.WithError(err).Errorf("unable to write error message to client")
-			}
-
-			return
-		}
-
-		// Insert the partial pathinfo structs into our lookup map,
-		// so requesting the NAR file will be possible.
-		// The same entry might exist already, but it'll have the same contents (so
-		// replacing will be a no-op), except maybe the root node Name field value, which
-		// is safe to ignore (as not part of the NAR).
-		s.narDbMu.Lock()
-		s.narDb[narHash.SRIString()] = &narData{
-			rootNode: rootNode,
-			narSize:  narSize,
-		}
-		s.narDbMu.Unlock()
-
-		// Done!
-	})
-
-}
diff --git a/tvix/nar-bridge-go/pkg/http/narinfo.go b/tvix/nar-bridge-go/pkg/http/narinfo.go
deleted file mode 100644
index e5b99a9505f1..000000000000
--- a/tvix/nar-bridge-go/pkg/http/narinfo.go
+++ /dev/null
@@ -1,51 +0,0 @@
-package http
-
-import (
-	"fmt"
-
-	storev1pb "code.tvl.fyi/tvix/store-go"
-	mh "github.com/multiformats/go-multihash/core"
-	nixhash "github.com/nix-community/go-nix/pkg/hash"
-
-	"github.com/nix-community/go-nix/pkg/narinfo"
-	"github.com/nix-community/go-nix/pkg/narinfo/signature"
-	"github.com/nix-community/go-nix/pkg/nixbase32"
-)
-
-// ToNixNarInfo converts the PathInfo to a narinfo.NarInfo.
-func ToNixNarInfo(p *storev1pb.PathInfo) (*narinfo.NarInfo, error) {
-	// ensure the PathInfo is valid, and extract the StorePath from the node in
-	// there.
-	storePath, err := p.Validate()
-	if err != nil {
-		return nil, fmt.Errorf("failed to validate PathInfo: %w", err)
-	}
-
-	// convert the signatures from storev1pb signatures to narinfo signatures
-	narinfoSignatures := make([]signature.Signature, len(p.GetNarinfo().GetSignatures()))
-	for i, pathInfoSignature := range p.GetNarinfo().GetSignatures() {
-		narinfoSignatures[i] = signature.Signature{
-			Name: pathInfoSignature.GetName(),
-			Data: pathInfoSignature.GetData(),
-		}
-	}
-
-	// produce nixhash for the narsha256.
-	narHash, err := nixhash.FromHashTypeAndDigest(
-		mh.SHA2_256,
-		p.GetNarinfo().GetNarSha256(),
-	)
-	if err != nil {
-		return nil, fmt.Errorf("invalid narsha256: %w", err)
-	}
-
-	return &narinfo.NarInfo{
-		StorePath:   storePath.Absolute(),
-		URL:         "nar/" + nixbase32.EncodeToString(narHash.Digest()) + ".nar",
-		Compression: "none",
-		NarHash:     narHash,
-		NarSize:     uint64(p.GetNarinfo().GetNarSize()),
-		References:  p.GetNarinfo().GetReferenceNames(),
-		Signatures:  narinfoSignatures,
-	}, nil
-}
diff --git a/tvix/nar-bridge-go/pkg/http/narinfo_get.go b/tvix/nar-bridge-go/pkg/http/narinfo_get.go
deleted file mode 100644
index d43cb58078da..000000000000
--- a/tvix/nar-bridge-go/pkg/http/narinfo_get.go
+++ /dev/null
@@ -1,137 +0,0 @@
-package http
-
-import (
-	"context"
-	"encoding/base64"
-	"errors"
-	"fmt"
-	"io"
-	"io/fs"
-	"net/http"
-	"strings"
-	"sync"
-
-	storev1pb "code.tvl.fyi/tvix/store-go"
-	"github.com/go-chi/chi/v5"
-	nixhash "github.com/nix-community/go-nix/pkg/hash"
-	"github.com/nix-community/go-nix/pkg/nixbase32"
-	log "github.com/sirupsen/logrus"
-	"google.golang.org/grpc/codes"
-	"google.golang.org/grpc/status"
-)
-
-// renderNarinfo writes narinfo contents to a passed io.Writer, or returns a
-// (wrapped) fs.ErrNotExist error if something doesn't exist.
-// If headOnly is set to true, only the existence is checked, but no content is
-// actually written.
-func renderNarinfo(
-	ctx context.Context,
-	log *log.Entry,
-	pathInfoServiceClient storev1pb.PathInfoServiceClient,
-	narHashToPathInfoMu *sync.Mutex,
-	narHashToPathInfo map[string]*narData,
-	outputHash []byte,
-	w io.Writer,
-	headOnly bool,
-) error {
-	pathInfo, err := pathInfoServiceClient.Get(ctx, &storev1pb.GetPathInfoRequest{
-		ByWhat: &storev1pb.GetPathInfoRequest_ByOutputHash{
-			ByOutputHash: outputHash,
-		},
-	})
-	if err != nil {
-		st, ok := status.FromError(err)
-		if ok {
-			if st.Code() == codes.NotFound {
-				return fmt.Errorf("output hash %v not found: %w", base64.StdEncoding.EncodeToString(outputHash), fs.ErrNotExist)
-			}
-			return fmt.Errorf("unable to get pathinfo, code %v: %w", st.Code(), err)
-		}
-
-		return fmt.Errorf("unable to get pathinfo: %w", err)
-	}
-
-	log = log.WithField("pathInfo", pathInfo)
-
-	if _, err := pathInfo.Validate(); err != nil {
-		log.WithError(err).Error("unable to validate PathInfo")
-
-		return fmt.Errorf("unable to validate PathInfo: %w", err)
-	}
-
-	if pathInfo.GetNarinfo() == nil {
-		log.Error("PathInfo doesn't contain Narinfo field")
-
-		return fmt.Errorf("PathInfo doesn't contain Narinfo field")
-	}
-
-	// extract the NARHash. This must succeed, as Validate() did succeed.
-	narHash, err := nixhash.FromHashTypeAndDigest(0x12, pathInfo.GetNarinfo().GetNarSha256())
-	if err != nil {
-		panic("must parse NarHash")
-	}
-
-	// add things to the lookup table, in case the same process didn't handle the NAR hash yet.
-	narHashToPathInfoMu.Lock()
-	narHashToPathInfo[narHash.SRIString()] = &narData{
-		rootNode: pathInfo.GetNode(),
-		narSize:  pathInfo.GetNarinfo().GetNarSize(),
-	}
-	narHashToPathInfoMu.Unlock()
-
-	if headOnly {
-		return nil
-	}
-
-	// convert the PathInfo to NARInfo.
-	narInfo, err := ToNixNarInfo(pathInfo)
-	if err != nil {
-		return fmt.Errorf("unable to convert PathInfo to NarInfo: %w", err)
-	}
-
-	// Write it out to the client.
-	_, err = io.Copy(w, strings.NewReader(narInfo.String()))
-	if err != nil {
-		return fmt.Errorf("unable to write narinfo to client: %w", err)
-	}
-
-	return nil
-}
-
-func registerNarinfoGet(s *Server) {
-	// GET/HEAD $outHash.narinfo looks up the PathInfo from the tvix-store,
-	// and, if it's a GET request, renders a .narinfo file to the client.
-	// In both cases it will keep the PathInfo in the lookup map,
-	// so a subsequent GET/HEAD /nar/$narhash.nar request can find it.
-	genNarinfoHandler := func(isHead bool) func(w http.ResponseWriter, r *http.Request) {
-		return func(w http.ResponseWriter, r *http.Request) {
-			defer r.Body.Close()
-
-			ctx := r.Context()
-			log := log.WithField("outputhash", chi.URLParamFromCtx(ctx, "outputhash"))
-
-			// parse the output hash sent in the request URL
-			outputHash, err := nixbase32.DecodeString(chi.URLParamFromCtx(ctx, "outputhash"))
-			if err != nil {
-				log.WithError(err).Error("unable to decode output hash from url")
-				w.WriteHeader(http.StatusBadRequest)
-				_, err := w.Write([]byte("unable to decode output hash from url"))
-				if err != nil {
-					log.WithError(err).Errorf("unable to write error message to client")
-				}
-
-				return
-			}
-
-			err = renderNarinfo(ctx, log, s.pathInfoServiceClient, &s.narDbMu, s.narDb, outputHash, w, isHead)
-			if err != nil {
-				if errors.Is(err, fs.ErrNotExist) {
-					w.WriteHeader(http.StatusNotFound)
-				} else {
-					log.WithError(err).Warn("unable to render narinfo")
-					w.WriteHeader(http.StatusInternalServerError)
-				}
-			}
-		}
-	}
-
-	s.handler.Get("/{outputhash:^["+nixbase32.Alphabet+"]{32}}.narinfo", genNarinfoHandler(false))
-	s.handler.Head("/{outputhash:^["+nixbase32.Alphabet+"]{32}}.narinfo", genNarinfoHandler(true))
-}
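
(renderNarinfo maps a gRPC NotFound onto fs.ErrNotExist so the HTTP handler can answer 404 instead of 500. A condensed sketch of that mapping, using the same grpc status/codes packages imported above:)

package main

import (
	"errors"
	"fmt"
	"io/fs"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// mapRPCError wraps a gRPC NotFound as fs.ErrNotExist, so callers can pick
// the HTTP status with errors.Is, as the narinfo/nar handlers above do.
func mapRPCError(err error) error {
	if st, ok := status.FromError(err); ok && st.Code() == codes.NotFound {
		return fmt.Errorf("not found: %w", fs.ErrNotExist)
	}
	return err
}

func main() {
	err := mapRPCError(status.Error(codes.NotFound, "no such path"))
	fmt.Println(errors.Is(err, fs.ErrNotExist)) // true -> respond with 404
}
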
diff --git a/tvix/nar-bridge-go/pkg/http/narinfo_put.go b/tvix/nar-bridge-go/pkg/http/narinfo_put.go
deleted file mode 100644
index 0e2ae989c039..000000000000
--- a/tvix/nar-bridge-go/pkg/http/narinfo_put.go
+++ /dev/null
@@ -1,103 +0,0 @@
-package http
-
-import (
-	"net/http"
-
-	"code.tvl.fyi/tvix/nar-bridge-go/pkg/importer"
-	"github.com/go-chi/chi/v5"
-	"github.com/nix-community/go-nix/pkg/narinfo"
-	"github.com/nix-community/go-nix/pkg/nixbase32"
-	"github.com/sirupsen/logrus"
-	log "github.com/sirupsen/logrus"
-)
-
-func registerNarinfoPut(s *Server) {
-	s.handler.Put("/{outputhash:^["+nixbase32.Alphabet+"]{32}}.narinfo", func(w http.ResponseWriter, r *http.Request) {
-		defer r.Body.Close()
-
-		ctx := r.Context()
-		log := log.WithField("outputhash", chi.URLParamFromCtx(ctx, "outputhash"))
-
-		// TODO: decide on merging behaviour.
-		// Maybe it's fine to add if contents are the same, but more sigs can be added?
-		// Right now, just replace a .narinfo for a path that already exists.
-
-		// read and parse the .narinfo file
-		narInfo, err := narinfo.Parse(r.Body)
-		if err != nil {
-			log.WithError(err).Error("unable to parse narinfo")
-			w.WriteHeader(http.StatusBadRequest)
-			_, err := w.Write([]byte("unable to parse narinfo"))
-			if err != nil {
-				log.WithError(err).Errorf("unable to write error message to client")
-			}
-
-			return
-		}
-
-		log = log.WithFields(logrus.Fields{
-			"narhash":     narInfo.NarHash.SRIString(),
-			"output_path": narInfo.StorePath,
-		})
-
-		// look up the narHash in our temporary map
-		s.narDbMu.Lock()
-		narData, found := s.narDb[narInfo.NarHash.SRIString()]
-		s.narDbMu.Unlock()
-		if !found {
-			log.Error("unable to find referred NAR")
-			w.WriteHeader(http.StatusBadRequest)
-			_, err := w.Write([]byte("unable to find referred NAR"))
-			if err != nil {
-				log.WithError(err).Errorf("unable to write error message to client")
-			}
-
-			return
-		}
-
-		rootNode := narData.rootNode
-
-		// compare fields with what we computed while receiving the NAR file
-
-		// NarSize needs to match
-		if narData.narSize != narInfo.NarSize {
-			log.Error("narsize mismatch")
-			w.WriteHeader(http.StatusBadRequest)
-			_, err := w.Write([]byte("narsize mismatch"))
-			if err != nil {
-				log.WithError(err).Errorf("unable to write error message to client")
-			}
-
-			return
-		}
-
-		pathInfo, err := importer.GenPathInfo(rootNode, narInfo)
-		if err != nil {
-			log.WithError(err).Error("unable to generate PathInfo")
-
-			w.WriteHeader(http.StatusInternalServerError)
-			_, err := w.Write([]byte("unable to generate PathInfo"))
-			if err != nil {
-				log.WithError(err).Errorf("unable to write error message to client")
-			}
-
-			return
-		}
-
-		log.WithField("pathInfo", pathInfo).Debug("inserted new pathInfo")
-
-		receivedPathInfo, err := s.pathInfoServiceClient.Put(ctx, pathInfo)
-		if err != nil {
-			log.WithError(err).Error("unable to upload pathinfo to service")
-			w.WriteHeader(http.StatusInternalServerError)
-			_, err := w.Write([]byte("unable to upload pathinfo to server"))
-			if err != nil {
-				log.WithError(err).Errorf("unable to write error message to client")
-			}
-
-			return
-		}
-
-		log.WithField("pathInfo", receivedPathInfo).Debug("got back PathInfo")
-	})
-}
diff --git a/tvix/nar-bridge-go/pkg/http/server.go b/tvix/nar-bridge-go/pkg/http/server.go
deleted file mode 100644
index fbcb20be18b7..000000000000
--- a/tvix/nar-bridge-go/pkg/http/server.go
+++ /dev/null
@@ -1,119 +0,0 @@
-package http
-
-import (
-	"context"
-	"fmt"
-	"net"
-	"net/http"
-	"strings"
-	"sync"
-	"time"
-
-	castorev1pb "code.tvl.fyi/tvix/castore-go"
-	storev1pb "code.tvl.fyi/tvix/store-go"
-	"github.com/go-chi/chi/middleware"
-	"github.com/go-chi/chi/v5"
-	log "github.com/sirupsen/logrus"
-	"go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp"
-)
-
-type Server struct {
-	srv     *http.Server
-	handler chi.Router
-
-	directoryServiceClient castorev1pb.DirectoryServiceClient
-	blobServiceClient      castorev1pb.BlobServiceClient
-	pathInfoServiceClient  storev1pb.PathInfoServiceClient
-
-	// When uploading NAR files to an HTTP binary cache, the .nar
-	// files are uploaded before the .narinfo files.
-	// We need *both* to be able to fully construct a PathInfo object.
-	// Keep an in-memory map of narhash(es) (in SRI) to (unnamed) root node and nar
-	// size.
-	// This is necessary until we can ask a PathInfoService for a node with a given
-	// narSha256.
-	narDbMu sync.Mutex
-	narDb   map[string]*narData
-}
-
-type narData struct {
-	rootNode *castorev1pb.Node
-	narSize  uint64
-}
-
-func New(
-	directoryServiceClient castorev1pb.DirectoryServiceClient,
-	blobServiceClient castorev1pb.BlobServiceClient,
-	pathInfoServiceClient storev1pb.PathInfoServiceClient,
-	enableAccessLog bool,
-	priority int,
-) *Server {
-	r := chi.NewRouter()
-	r.Use(func(h http.Handler) http.Handler {
-		return otelhttp.NewHandler(h, "http.request")
-	})
-
-	if enableAccessLog {
-		r.Use(middleware.Logger)
-	}
-
-	r.Get("/", func(w http.ResponseWriter, r *http.Request) {
-		_, err := w.Write([]byte("nar-bridge"))
-		if err != nil {
-			log.Errorf("Unable to write response: %v", err)
-		}
-	})
-
-	r.Get("/nix-cache-info", func(w http.ResponseWriter, r *http.Request) {
-		_, err := w.Write([]byte(fmt.Sprintf("StoreDir: /nix/store\nWantMassQuery: 1\nPriority: %d\n", priority)))
-		if err != nil {
-			log.Errorf("Unable to write response: %v", err)
-		}
-	})
-
-	s := &Server{
-		handler:                r,
-		directoryServiceClient: directoryServiceClient,
-		blobServiceClient:      blobServiceClient,
-		pathInfoServiceClient:  pathInfoServiceClient,
-		narDb:                  make(map[string]*narData),
-	}
-
-	registerNarPut(s)
-	registerNarinfoPut(s)
-
-	registerNarinfoGet(s)
-	registerNarGet(s)
-
-	return s
-}
-
-func (s *Server) Shutdown(ctx context.Context) error {
-	return s.srv.Shutdown(ctx)
-}
-
-// ListenAndServe starts the webserver, and waits for it to be closed or
-// shut down, after which it'll return ErrServerClosed.
-func (s *Server) ListenAndServe(addr string) error {
-	s.srv = &http.Server{
-		Handler:      s.handler,
-		ReadTimeout:  500 * time.Second,
-		WriteTimeout: 500 * time.Second,
-		IdleTimeout:  500 * time.Second,
-	}
-
-	var listener net.Listener
-	var err error
-
-	// check addr. If it contains slashes, assume it's a unix domain socket.
-	if strings.Contains(addr, "/") {
-		listener, err = net.Listen("unix", addr)
-	} else {
-		listener, err = net.Listen("tcp", addr)
-	}
-	if err != nil {
-		return fmt.Errorf("unable to listen on %v: %w", addr, err)
-	}
-
-	return s.srv.Serve(listener)
-}
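
(ListenAndServe above picks the listener family from the address shape: a slash means a unix domain socket path, anything else is treated as host:port TCP. A minimal sketch of that dispatch:)

package main

import (
	"fmt"
	"net"
	"strings"
)

// listen mirrors the address heuristic in Server.ListenAndServe: a slash in
// the address means a unix socket path, otherwise host:port TCP.
func listen(addr string) (net.Listener, error) {
	if strings.Contains(addr, "/") {
		return net.Listen("unix", addr)
	}
	return net.Listen("tcp", addr)
}

func main() {
	l, err := listen("[::]:0") // TCP on an ephemeral port
	if err != nil {
		panic(err)
	}
	defer l.Close()
	fmt.Println("listening on", l.Addr())
}
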
diff --git a/tvix/nar-bridge-go/pkg/http/util.go b/tvix/nar-bridge-go/pkg/http/util.go
deleted file mode 100644
index 60febea1f430..000000000000
--- a/tvix/nar-bridge-go/pkg/http/util.go
+++ /dev/null
@@ -1,24 +0,0 @@
-package http
-
-import (
-	"fmt"
-	nixhash "github.com/nix-community/go-nix/pkg/hash"
-)
-
-// parseNarHashFromUrl parses a nixbase32 string representing a sha256 NarHash
-// and returns a nixhash.Hash when it was able to parse, or an error.
-func parseNarHashFromUrl(narHashFromUrl string) (*nixhash.Hash, error) {
-	// peek at the length. If it's 52 characters, assume sha256,
-	// if it's something else, this is an error.
-	l := len(narHashFromUrl)
-	if l != 52 {
-		return nil, fmt.Errorf("invalid length of narHash: %v", l)
-	}
-
-	nixHash, err := nixhash.ParseNixBase32("sha256:" + narHashFromUrl)
-	if err != nil {
-		return nil, fmt.Errorf("unable to parse nixbase32 hash: %w", err)
-	}
-
-	return nixHash, nil
-}
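
(For reference, nixhash.ParseNixBase32, as used above, expects an explicit algorithm prefix, which is why parseNarHashFromUrl prepends "sha256:" after the 52-character length check. A hedged usage sketch with a hypothetical all-zeros hash:)

package main

import (
	"fmt"
	"strings"

	nixhash "github.com/nix-community/go-nix/pkg/hash"
)

func main() {
	// A sha256 NAR hash in nixbase32 is always 52 characters long; the
	// parser wants the algorithm spelled out in front.
	dummy := strings.Repeat("0", 52) // hypothetical all-zeros hash
	h, err := nixhash.ParseNixBase32("sha256:" + dummy)
	if err != nil {
		panic(err)
	}
	fmt.Println(h.SRIString()) // the SRI form used as the narDb key
}
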
diff --git a/tvix/nar-bridge-go/pkg/importer/blob_upload.go b/tvix/nar-bridge-go/pkg/importer/blob_upload.go
deleted file mode 100644
index c1255dd3ad5d..000000000000
--- a/tvix/nar-bridge-go/pkg/importer/blob_upload.go
+++ /dev/null
@@ -1,71 +0,0 @@
-package importer
-
-import (
-	"bufio"
-	"context"
-	"encoding/base64"
-	"errors"
-	"fmt"
-	"io"
-
-	castorev1pb "code.tvl.fyi/tvix/castore-go"
-	log "github.com/sirupsen/logrus"
-)
-
-// the size of individual BlobChunk we send when uploading to BlobService.
-const chunkSize = 1024 * 1024
-
-// GenBlobUploaderCb produces a callback function that can be used as the
-// blobCb argument to the importer.Import function call.
-func GenBlobUploaderCb(ctx context.Context, blobServiceClient castorev1pb.BlobServiceClient) func(io.Reader) ([]byte, error) {
-	return func(blobReader io.Reader) ([]byte, error) {
-		// Ensure the blobReader is buffered to at least the chunk size.
-		blobReader = bufio.NewReaderSize(blobReader, chunkSize)
-
-		putter, err := blobServiceClient.Put(ctx)
-		if err != nil {
-			// return error to the importer
-			return nil, fmt.Errorf("error from blob service: %w", err)
-		}
-
-		blobSize := 0
-		chunk := make([]byte, chunkSize)
-
-		for {
-			n, err := blobReader.Read(chunk)
-			if err != nil && !errors.Is(err, io.EOF) {
-				return nil, fmt.Errorf("unable to read from blobreader: %w", err)
-			}
-
-			if n != 0 {
-				log.WithField("chunk_size", n).Debug("sending chunk")
-				blobSize += n
-
-				// send the blob chunk to the server. The err is only valid in the inner scope
-				if err := putter.Send(&castorev1pb.BlobChunk{
-					Data: chunk[:n],
-				}); err != nil {
-					return nil, fmt.Errorf("sending blob chunk: %w", err)
-				}
-			}
-
-			// if our read from blobReader returned an EOF, we're done reading
-			if errors.Is(err, io.EOF) {
-				break
-			}
-
-		}
-
-		resp, err := putter.CloseAndRecv()
-		if err != nil {
-			return nil, fmt.Errorf("close blob putter: %w", err)
-		}
-
-		log.WithFields(log.Fields{
-			"blob_digest": base64.StdEncoding.EncodeToString(resp.GetDigest()),
-			"blob_size":   blobSize,
-		}).Debug("uploaded blob")
-
-		return resp.GetDigest(), nil
-	}
-}
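
(The upload loop above relies on the io.Reader contract that a single Read may return both n > 0 and io.EOF, so the final bytes must be handled before the EOF breaks the loop. A standalone sketch of that loop shape:)

package main

import (
	"errors"
	"fmt"
	"io"
	"strings"
)

func main() {
	r := strings.NewReader("some blob contents")
	chunk := make([]byte, 8) // tiny chunk size, for illustration
	total := 0

	for {
		n, err := r.Read(chunk)
		if err != nil && !errors.Is(err, io.EOF) {
			panic(err)
		}
		if n != 0 {
			// a real uploader would send chunk[:n] here; we just count
			total += n
		}
		// only stop after the final bytes have been handled
		if errors.Is(err, io.EOF) {
			break
		}
	}
	fmt.Println("sent", total, "bytes") // sent 18 bytes
}
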
diff --git a/tvix/nar-bridge-go/pkg/importer/counting_writer.go b/tvix/nar-bridge-go/pkg/importer/counting_writer.go
deleted file mode 100644
index d003a4b11bfd..000000000000
--- a/tvix/nar-bridge-go/pkg/importer/counting_writer.go
+++ /dev/null
@@ -1,21 +0,0 @@
-package importer
-
-import (
-	"io"
-)
-
-// CountingWriter implements io.Writer.
-var _ io.Writer = &CountingWriter{}
-
-type CountingWriter struct {
-	bytesWritten uint64
-}
-
-func (cw *CountingWriter) Write(p []byte) (n int, err error) {
-	cw.bytesWritten += uint64(len(p))
-	return len(p), nil
-}
-
-func (cw *CountingWriter) BytesWritten() uint64 {
-	return cw.bytesWritten
-}
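
(CountingWriter exists so the importer can measure and hash the NAR stream as the nar parser consumes it: an io.TeeReader copies everything read into an io.MultiWriter of the counter and a sha256 hasher. A self-contained sketch of that plumbing:)

package main

import (
	"crypto/sha256"
	"fmt"
	"io"
	"strings"
)

type countingWriter struct{ n uint64 }

func (c *countingWriter) Write(p []byte) (int, error) {
	c.n += uint64(len(p))
	return len(p), nil
}

func main() {
	src := strings.NewReader("nar bytes go here")

	counter := &countingWriter{}
	hasher := sha256.New()
	// Everything read through tee is also written to counter and hasher.
	tee := io.TeeReader(src, io.MultiWriter(counter, hasher))

	// io.Discard stands in for the NAR parser consuming the stream.
	if _, err := io.Copy(io.Discard, tee); err != nil {
		panic(err)
	}
	fmt.Printf("size=%d sha256=%x\n", counter.n, hasher.Sum(nil))
}
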
diff --git a/tvix/nar-bridge-go/pkg/importer/directory_upload.go b/tvix/nar-bridge-go/pkg/importer/directory_upload.go
deleted file mode 100644
index 117f442fa54f..000000000000
--- a/tvix/nar-bridge-go/pkg/importer/directory_upload.go
+++ /dev/null
@@ -1,88 +0,0 @@
-package importer
-
-import (
-	"bytes"
-	"context"
-	"encoding/base64"
-	"fmt"
-
-	castorev1pb "code.tvl.fyi/tvix/castore-go"
-	log "github.com/sirupsen/logrus"
-)
-
-// DirectoriesUploader opens a Put stream when it receives the first Put() call,
-// and then uses the opened stream for subsequent Put() calls.
-// When the uploading is finished, a call to Done() will close the stream and
-// return the root digest returned from the directoryServiceClient.
-type DirectoriesUploader struct {
-	ctx                       context.Context
-	directoryServiceClient    castorev1pb.DirectoryServiceClient
-	directoryServicePutStream castorev1pb.DirectoryService_PutClient
-	lastDirectoryDigest       []byte
-}
-
-func NewDirectoriesUploader(ctx context.Context, directoryServiceClient castorev1pb.DirectoryServiceClient) *DirectoriesUploader {
-	return &DirectoriesUploader{
-		ctx:                       ctx,
-		directoryServiceClient:    directoryServiceClient,
-		directoryServicePutStream: nil,
-	}
-}
-
-func (du *DirectoriesUploader) Put(directory *castorev1pb.Directory) ([]byte, error) {
-	directoryDigest, err := directory.Digest()
-	if err != nil {
-		return nil, fmt.Errorf("failed calculating directory digest: %w", err)
-	}
-
-	// Send the directory to the directory service
-	// If the stream hasn't been initialized yet, do it first
-	if du.directoryServicePutStream == nil {
-		directoryServicePutStream, err := du.directoryServiceClient.Put(du.ctx)
-		if err != nil {
-			return nil, fmt.Errorf("unable to initialize directory service put stream: %v", err)
-		}
-		du.directoryServicePutStream = directoryServicePutStream
-	}
-
-	// send the directory out
-	err = du.directoryServicePutStream.Send(directory)
-	if err != nil {
-		return nil, fmt.Errorf("error sending directory: %w", err)
-	}
-	log.WithField("digest", base64.StdEncoding.EncodeToString(directoryDigest)).Debug("uploaded directory")
-
-	// update lastDirectoryDigest
-	du.lastDirectoryDigest = directoryDigest
-
-	return directoryDigest, nil
-}
-
-// Done closes the stream and returns the response.
-// It returns nil if closed for a second time.
-func (du *DirectoriesUploader) Done() (*castorev1pb.PutDirectoryResponse, error) {
-	// only close once, and only if we opened.
-	if du.directoryServicePutStream == nil {
-		return nil, nil
-	}
-
-	putDirectoryResponse, err := du.directoryServicePutStream.CloseAndRecv()
-	if err != nil {
-		return nil, fmt.Errorf("unable to close directory service put stream: %v", err)
-	}
-
-	// ensure the response contains the same digest as the one we have in lastDirectoryDigest.
-	// Otherwise, the backend came up with a different digest than we did, in which case we return an error.
-	if !bytes.Equal(du.lastDirectoryDigest, putDirectoryResponse.RootDigest) {
-		return nil, fmt.Errorf(
-			"backend calculated a different root digest than we did, expected %s, actual %s",
-			base64.StdEncoding.EncodeToString(du.lastDirectoryDigest),
-			base64.StdEncoding.EncodeToString(putDirectoryResponse.RootDigest),
-		)
-	}
-
-	// clear directoryServicePutStream.
-	du.directoryServicePutStream = nil
-
-	return putDirectoryResponse, nil
-}
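
(DirectoriesUploader opens its Put stream lazily on first use and makes Done idempotent by clearing the stream afterwards. A stripped-down sketch of that lazy-open/close-once shape; the stream type is a hypothetical stand-in for the gRPC Put client:)

package main

import "fmt"

// stream is a hypothetical stand-in for the gRPC Put client.
type stream struct{ sent int }

func (s *stream) Send(item string) { s.sent++ }

func (s *stream) CloseAndRecv() int { return s.sent }

type uploader struct{ st *stream }

// Put opens the stream on first use, then reuses it.
func (u *uploader) Put(item string) {
	if u.st == nil {
		u.st = &stream{}
	}
	u.st.Send(item)
}

// Done closes the stream and reports how many items were sent; a second
// call is a no-op, mirroring DirectoriesUploader.Done.
func (u *uploader) Done() (int, bool) {
	if u.st == nil {
		return 0, false
	}
	n := u.st.CloseAndRecv()
	u.st = nil
	return n, true
}

func main() {
	var u uploader
	u.Put("a")
	u.Put("b")
	fmt.Println(u.Done()) // 2 true
	fmt.Println(u.Done()) // 0 false
}
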
diff --git a/tvix/nar-bridge-go/pkg/importer/gen_pathinfo.go b/tvix/nar-bridge-go/pkg/importer/gen_pathinfo.go
deleted file mode 100644
index bdc298a9a399..000000000000
--- a/tvix/nar-bridge-go/pkg/importer/gen_pathinfo.go
+++ /dev/null
@@ -1,62 +0,0 @@
-package importer
-
-import (
-	castorev1pb "code.tvl.fyi/tvix/castore-go"
-	storev1pb "code.tvl.fyi/tvix/store-go"
-	"fmt"
-	"github.com/nix-community/go-nix/pkg/narinfo"
-	"github.com/nix-community/go-nix/pkg/storepath"
-)
-
-// GenPathInfo takes a rootNode and narInfo and assembles a PathInfo.
-// The rootNode is renamed to match the StorePath in the narInfo.
-func GenPathInfo(rootNode *castorev1pb.Node, narInfo *narinfo.NarInfo) (*storev1pb.PathInfo, error) {
-	// parse the storePath from the .narinfo
-	storePath, err := storepath.FromAbsolutePath(narInfo.StorePath)
-	if err != nil {
-		return nil, fmt.Errorf("unable to parse StorePath: %w", err)
-	}
-
-	// construct the references, by parsing ReferenceNames and extracting the digest
-	references := make([][]byte, len(narInfo.References))
-	for i, referenceStr := range narInfo.References {
-		// parse reference as store path
-		referenceStorePath, err := storepath.FromString(referenceStr)
-		if err != nil {
-			return nil, fmt.Errorf("unable to parse reference %s as storepath: %w", referenceStr, err)
-		}
-		references[i] = referenceStorePath.Digest
-	}
-
-	// construct the narInfo.Signatures[*] from pathInfo.Narinfo.Signatures[*]
-	narinfoSignatures := make([]*storev1pb.NARInfo_Signature, len(narInfo.Signatures))
-	for i, narinfoSig := range narInfo.Signatures {
-		narinfoSignatures[i] = &storev1pb.NARInfo_Signature{
-			Name: narinfoSig.Name,
-			Data: narinfoSig.Data,
-		}
-	}
-
-	// assemble the PathInfo.
-	pathInfo := &storev1pb.PathInfo{
-		// embed a new root node with the name set to the store path basename.
-		Node:       castorev1pb.RenamedNode(rootNode, storePath.String()),
-		References: references,
-		Narinfo: &storev1pb.NARInfo{
-			NarSize:        narInfo.NarSize,
-			NarSha256:      narInfo.FileHash.Digest(),
-			Signatures:     narinfoSignatures,
-			ReferenceNames: narInfo.References,
-		},
-	}
-
-	// run Validate on the PathInfo, more as an additional sanity check that our code is sound,
-	// to make sure we populated everything properly, before returning it.
-	// Fail hard if we fail validation, this is a code error.
-	if _, err = pathInfo.Validate(); err != nil {
-		panic(fmt.Sprintf("PathInfo failed validation: %v", err))
-	}
-
-	return pathInfo, nil
-
-}
diff --git a/tvix/nar-bridge-go/pkg/importer/importer.go b/tvix/nar-bridge-go/pkg/importer/importer.go
deleted file mode 100644
index fce6c5f293da..000000000000
--- a/tvix/nar-bridge-go/pkg/importer/importer.go
+++ /dev/null
@@ -1,303 +0,0 @@
-package importer
-
-import (
-	"bytes"
-	"context"
-	"crypto/sha256"
-	"errors"
-	"fmt"
-	"io"
-	"path"
-	"strings"
-
-	castorev1pb "code.tvl.fyi/tvix/castore-go"
-	"github.com/nix-community/go-nix/pkg/nar"
-	"golang.org/x/sync/errgroup"
-	"lukechampine.com/blake3"
-)
-
-const (
-	// asyncUploadThreshold controls when a file is buffered into memory and uploaded
-	// asynchronously. Files must be smaller than the threshold to be uploaded asynchronously.
-	asyncUploadThreshold = 1024 * 1024 // 1 MiB
-	// maxConcurrentAsyncUploads is the maximum number of async blob uploads allowed to be
-	// running concurrently at any given time for a single import operation.
-	maxConcurrentAsyncUploads = 128
-)
-
-// An item on the directories stack
-type stackItem struct {
-	path      string
-	directory *castorev1pb.Directory
-}
-
-// Import reads a NAR from a reader, and returns the root node,
-// NAR size and NAR sha256 digest.
-func Import(
-	// a context, to support cancellation
-	ctx context.Context,
-	// The reader the data is read from
-	r io.Reader,
-	// callback function called with each regular file content
-	blobCb func(fileReader io.Reader) ([]byte, error),
-	// callback function called with each finalized directory node
-	directoryCb func(directory *castorev1pb.Directory) ([]byte, error),
-) (*castorev1pb.Node, uint64, []byte, error) {
-	// We need to wrap the underlying reader a bit.
-	// - we want to keep track of the number of bytes read in total
-	// - we calculate the sha256 digest over all data read
-	// Express these two things in a MultiWriter, and give the NAR reader a
-	// TeeReader that writes to it.
-	narCountW := &CountingWriter{}
-	sha256W := sha256.New()
-	multiW := io.MultiWriter(narCountW, sha256W)
-	narReader, err := nar.NewReader(io.TeeReader(r, multiW))
-	if err != nil {
-		return nil, 0, nil, fmt.Errorf("failed to instantiate nar reader: %w", err)
-	}
-	defer narReader.Close()
-
-	// If we store a symlink or regular file at the root, these are not nil.
-	// If they are nil, we instead have a stackDirectory.
-	var rootSymlink *castorev1pb.SymlinkNode
-	var rootFile *castorev1pb.FileNode
-	var stackDirectory *castorev1pb.Directory
-
-	// Keep track of all async blob uploads so we can make sure they all succeed
-	// before returning.
-	var asyncBlobWg errgroup.Group
-	asyncBlobWg.SetLimit(maxConcurrentAsyncUploads)
-
-	var stack = []stackItem{}
-
-	// popFromStack is used when we transition to a different directory or
-	// drain the stack when we reach the end of the NAR.
-	// It adds the popped element to the element underneath if any,
-	// and passes it to the directoryCb callback.
-	// This function may only be called if the stack is not already empty.
-	popFromStack := func() error {
-		// Keep the top item, and "resize" the stack slice.
-		// This will only make the last element inaccessible, but chances are high
-		// we're re-using that space anyway.
-		toPop := stack[len(stack)-1]
-		stack = stack[:len(stack)-1]
-
-		// call the directoryCb
-		directoryDigest, err := directoryCb(toPop.directory)
-		if err != nil {
-			return fmt.Errorf("failed calling directoryCb: %w", err)
-		}
-
-		// if there's still a parent left on the stack, refer to it from there.
-		if len(stack) > 0 {
-			topOfStack := stack[len(stack)-1].directory
-			topOfStack.Directories = append(topOfStack.Directories, &castorev1pb.DirectoryNode{
-				Name:   []byte(path.Base(toPop.path)),
-				Digest: directoryDigest,
-				Size:   toPop.directory.Size(),
-			})
-		}
-		// Keep track that we have encountered at least one directory
-		stackDirectory = toPop.directory
-		return nil
-	}
-
-	getBasename := func(p string) string {
-		// extract the basename. In case of "/", replace with empty string.
-		basename := path.Base(p)
-		if basename == "/" {
-			basename = ""
-		}
-		return basename
-	}
-
-	for {
-		select {
-		case <-ctx.Done():
-			return nil, 0, nil, ctx.Err()
-		default:
-			// call narReader.Next() to get the next element
-			hdr, err := narReader.Next()
-
-			// If this returns an error, it's either EOF (when we're done reading from the NAR),
-			// or another error.
-			if err != nil {
-				// if this is not EOF, bail out
-				if !errors.Is(err, io.EOF) {
-					return nil, 0, nil, fmt.Errorf("failed getting next nar element: %w", err)
-				}
-
-				// The NAR has been read all the way to the end…
-				// Make sure we close the nar reader, which might read some final trailers.
-				if err := narReader.Close(); err != nil {
-					return nil, 0, nil, fmt.Errorf("unable to close nar reader: %w", err)
-				}
-
-				// Check the stack. While it's not empty, we need to pop things off the stack.
-				for len(stack) > 0 {
-					err := popFromStack()
-					if err != nil {
-						return nil, 0, nil, fmt.Errorf("unable to pop from stack: %w", err)
-					}
-				}
-
-				// Wait for any pending blob uploads.
-				err := asyncBlobWg.Wait()
-				if err != nil {
-					return nil, 0, nil, fmt.Errorf("async blob upload: %w", err)
-				}
-
-				// Stack is empty.
-				// Now exactly one of rootFile, rootSymlink or stackDirectory is
-				// non-nil, and we can return the root node.
-				narSize := narCountW.BytesWritten()
-				narSha256 := sha256W.Sum(nil)
-
-				if rootFile != nil {
-					return &castorev1pb.Node{
-						Node: &castorev1pb.Node_File{
-							File: rootFile,
-						},
-					}, narSize, narSha256, nil
-				} else if rootSymlink != nil {
-					return &castorev1pb.Node{
-						Node: &castorev1pb.Node_Symlink{
-							Symlink: rootSymlink,
-						},
-					}, narSize, narSha256, nil
-				} else if stackDirectory != nil {
-					// calculate directory digest (i.e. after we received all its contents)
-					dgst, err := stackDirectory.Digest()
-					if err != nil {
-						return nil, 0, nil, fmt.Errorf("unable to calculate root directory digest: %w", err)
-					}
-
-					return &castorev1pb.Node{
-						Node: &castorev1pb.Node_Directory{
-							Directory: &castorev1pb.DirectoryNode{
-								Name:   []byte{},
-								Digest: dgst,
-								Size:   stackDirectory.Size(),
-							},
-						},
-					}, narSize, narSha256, nil
-				} else {
-					return nil, 0, nil, fmt.Errorf("no root set")
-				}
-			}
-
-			// Check for valid path transitions, pop from stack if needed.
-			// The nar reader already gives us some guarantees about ordering and illegal transitions,
-			// so we really only need to check if the top-of-stack path is a prefix of the path,
-			// and if it's not, pop from the stack. We do this repeatedly until the top of the stack is
-			// the subdirectory the new entry is in, or we hit the root directory.
-
-			// We don't need to worry about the root node case, because we can only finish the root "/"
-			// if we're at the end of the NAR reader (covered by the EOF check above).
-			for len(stack) > 1 && !strings.HasPrefix(hdr.Path, stack[len(stack)-1].path+"/") {
-				err := popFromStack()
-				if err != nil {
-					return nil, 0, nil, fmt.Errorf("unable to pop from stack: %w", err)
-				}
-			}
-
-			if hdr.Type == nar.TypeSymlink {
-				symlinkNode := &castorev1pb.SymlinkNode{
-					Name:   []byte(getBasename(hdr.Path)),
-					Target: []byte(hdr.LinkTarget),
-				}
-				if len(stack) > 0 {
-					topOfStack := stack[len(stack)-1].directory
-					topOfStack.Symlinks = append(topOfStack.Symlinks, symlinkNode)
-				} else {
-					rootSymlink = symlinkNode
-				}
-
-			}
-			if hdr.Type == nar.TypeRegular {
-				uploadBlob := func(r io.Reader) ([]byte, error) {
-					// wrap reader with a reader counting the number of bytes read
-					blobCountW := &CountingWriter{}
-					blobReader := io.TeeReader(r, blobCountW)
-
-					blobDigest, err := blobCb(blobReader)
-					if err != nil {
-						return nil, fmt.Errorf("failure from blobCb: %w", err)
-					}
-
-					// ensure blobCb did read all the way to the end.
-					// If it didn't, the blobCb function is wrong and we should bail out.
-					if blobCountW.BytesWritten() != uint64(hdr.Size) {
-						return nil, fmt.Errorf("blobCb did not read all: %d/%d bytes", blobCountW.BytesWritten(), hdr.Size)
-					}
-
-					return blobDigest, nil
-				}
-
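-				// Hash the blob with blake3 while it streams past, so we can
-				// verify the digest returned by blobCb against what we read ourselves.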
-				h := blake3.New(32, nil)
-				blobReader := io.TeeReader(narReader, io.MultiWriter(h))
-				var blobDigest []byte
-
-				// If this file is small enough, read it off the wire immediately and
-				// upload to the blob service asynchronously. This helps reduce the
-				// RTT on blob uploads for NARs with many small files.
-				doAsync := hdr.Size < asyncUploadThreshold
-				if doAsync {
-					blobContents, err := io.ReadAll(blobReader)
-					if err != nil {
-						return nil, 0, nil, fmt.Errorf("read blob: %w", err)
-					}
-
-					blobDigest = h.Sum(nil)
-
-					asyncBlobWg.Go(func() error {
-						blobDigestFromCb, err := uploadBlob(bytes.NewReader(blobContents))
-						if err != nil {
-							return err
-						}
-
-						if !bytes.Equal(blobDigest, blobDigestFromCb) {
-							return fmt.Errorf("unexpected digest (got %x, expected %x)", blobDigestFromCb, blobDigest)
-						}
-
-						return nil
-					})
-				} else {
-					blobDigestFromCb, err := uploadBlob(blobReader)
-					if err != nil {
-						return nil, 0, nil, fmt.Errorf("upload blob: %w", err)
-					}
-
-					blobDigest = h.Sum(nil)
-					if !bytes.Equal(blobDigest, blobDigestFromCb) {
-						return nil, 0, nil, fmt.Errorf("unexpected digest (got %x, expected %x)", blobDigestFromCb, blobDigest)
-					}
-				}
-
-				fileNode := &castorev1pb.FileNode{
-					Name:       []byte(getBasename(hdr.Path)),
-					Digest:     blobDigest,
-					Size:       uint64(hdr.Size),
-					Executable: hdr.Executable,
-				}
-				if len(stack) > 0 {
-					topOfStack := stack[len(stack)-1].directory
-					topOfStack.Files = append(topOfStack.Files, fileNode)
-				} else {
-					rootFile = fileNode
-				}
-			}
-			if hdr.Type == nar.TypeDirectory {
-				directory := &castorev1pb.Directory{
-					Directories: []*castorev1pb.DirectoryNode{},
-					Files:       []*castorev1pb.FileNode{},
-					Symlinks:    []*castorev1pb.SymlinkNode{},
-				}
-				stack = append(stack, stackItem{
-					directory: directory,
-					path:      hdr.Path,
-				})
-			}
-		}
-	}
-}
diff --git a/tvix/nar-bridge-go/pkg/importer/importer_test.go b/tvix/nar-bridge-go/pkg/importer/importer_test.go
deleted file mode 100644
index 313677084f71..000000000000
--- a/tvix/nar-bridge-go/pkg/importer/importer_test.go
+++ /dev/null
@@ -1,537 +0,0 @@
-package importer_test
-
-import (
-	"bytes"
-	"context"
-	"errors"
-	"io"
-	"os"
-	"testing"
-
-	castorev1pb "code.tvl.fyi/tvix/castore-go"
-	"code.tvl.fyi/tvix/nar-bridge-go/pkg/importer"
-	"github.com/stretchr/testify/require"
-)
-
-func TestSymlink(t *testing.T) {
-	f, err := os.Open("../../testdata/symlink.nar")
-	require.NoError(t, err)
-
-	rootNode, narSize, narSha256, err := importer.Import(
-		context.Background(),
-		f,
-		func(blobReader io.Reader) ([]byte, error) {
-			panic("no file contents expected!")
-		}, func(directory *castorev1pb.Directory) ([]byte, error) {
-			panic("no directories expected!")
-		},
-	)
-	require.NoError(t, err)
-	require.Equal(t, &castorev1pb.Node{
-		Node: &castorev1pb.Node_Symlink{
-			Symlink: &castorev1pb.SymlinkNode{
-				Name:   []byte(""),
-				Target: []byte("/nix/store/somewhereelse"),
-			},
-		},
-	}, rootNode)
-	require.Equal(t, []byte{
-		0x09, 0x7d, 0x39, 0x7e, 0x9b, 0x58, 0x26, 0x38, 0x4e, 0xaa, 0x16, 0xc4, 0x57, 0x71, 0x5d, 0x1c, 0x1a, 0x51, 0x67, 0x03, 0x13, 0xea, 0xd0, 0xf5, 0x85, 0x66, 0xe0, 0xb2, 0x32, 0x53, 0x9c, 0xf1,
-	}, narSha256)
-	require.Equal(t, uint64(136), narSize)
-}
-
-func TestRegular(t *testing.T) {
-	f, err := os.Open("../../testdata/onebyteregular.nar")
-	require.NoError(t, err)
-
-	rootNode, narSize, narSha256, err := importer.Import(
-		context.Background(),
-		f,
-		func(blobReader io.Reader) ([]byte, error) {
-			contents, err := io.ReadAll(blobReader)
-			require.NoError(t, err, "reading blobReader should not error")
-			require.Equal(t, []byte{0x01}, contents, "contents read from blobReader should match expectations")
-			return mustBlobDigest(bytes.NewBuffer(contents)), nil
-		}, func(directory *castorev1pb.Directory) ([]byte, error) {
-			panic("no directories expected!")
-		},
-	)
-
-	// The blake3 digest of the 0x01 byte.
-	BLAKE3_DIGEST_0X01 := []byte{
-		0x48, 0xfc, 0x72, 0x1f, 0xbb, 0xc1, 0x72, 0xe0, 0x92, 0x5f, 0xa2, 0x7a, 0xf1, 0x67, 0x1d,
-		0xe2, 0x25, 0xba, 0x92, 0x71, 0x34, 0x80, 0x29, 0x98, 0xb1, 0x0a, 0x15, 0x68, 0xa1, 0x88,
-		0x65, 0x2b,
-	}
-
-	require.NoError(t, err)
-	require.Equal(t, &castorev1pb.Node{
-		Node: &castorev1pb.Node_File{
-			File: &castorev1pb.FileNode{
-				Name:       []byte(""),
-				Digest:     BLAKE3_DIGEST_0X01,
-				Size:       1,
-				Executable: false,
-			},
-		},
-	}, rootNode)
-	require.Equal(t, []byte{
-		0x73, 0x08, 0x50, 0xa8, 0x11, 0x25, 0x9d, 0xbf, 0x3a, 0x68, 0xdc, 0x2e, 0xe8, 0x7a, 0x79, 0xaa, 0x6c, 0xae, 0x9f, 0x71, 0x37, 0x5e, 0xdf, 0x39, 0x6f, 0x9d, 0x7a, 0x91, 0xfb, 0xe9, 0x13, 0x4d,
-	}, narSha256)
-	require.Equal(t, uint64(120), narSize)
-}
-
-func TestEmptyDirectory(t *testing.T) {
-	f, err := os.Open("../../testdata/emptydirectory.nar")
-	require.NoError(t, err)
-
-	expectedDirectory := &castorev1pb.Directory{
-		Directories: []*castorev1pb.DirectoryNode{},
-		Files:       []*castorev1pb.FileNode{},
-		Symlinks:    []*castorev1pb.SymlinkNode{},
-	}
-	rootNode, narSize, narSha256, err := importer.Import(
-		context.Background(),
-		f,
-		func(blobReader io.Reader) ([]byte, error) {
-			panic("no file contents expected!")
-		}, func(directory *castorev1pb.Directory) ([]byte, error) {
-			requireProtoEq(t, expectedDirectory, directory)
-			return mustDirectoryDigest(directory), nil
-		},
-	)
-	require.NoError(t, err)
-	require.Equal(t, &castorev1pb.Node{
-		Node: &castorev1pb.Node_Directory{
-			Directory: &castorev1pb.DirectoryNode{
-				Name:   []byte(""),
-				Digest: mustDirectoryDigest(expectedDirectory),
-				Size:   expectedDirectory.Size(),
-			},
-		},
-	}, rootNode)
-	require.Equal(t, []byte{
-		0xa5, 0x0a, 0x5a, 0xb6, 0xd9, 0x92, 0xf5, 0x59, 0x8e, 0xdd, 0x92, 0x10, 0x50, 0x59, 0xfa, 0xe9, 0xac, 0xfc, 0x19, 0x29, 0x81, 0xe0, 0x8b, 0xd8, 0x85, 0x34, 0xc2, 0x16, 0x7e, 0x92, 0x52, 0x6a,
-	}, narSha256)
-	require.Equal(t, uint64(96), narSize)
-}
-
-func TestFull(t *testing.T) {
-	f, err := os.Open("../../testdata/nar_1094wph9z4nwlgvsd53abfz8i117ykiv5dwnq9nnhz846s7xqd7d.nar")
-	require.NoError(t, err)
-
-	expectedDirectoryPaths := []string{
-		"/bin",
-		"/share/man/man1",
-		"/share/man/man5",
-		"/share/man/man8",
-		"/share/man",
-		"/share",
-		"/",
-	}
-	expectedDirectories := make(map[string]*castorev1pb.Directory, len(expectedDirectoryPaths))
-
-	// /bin is a leaf directory
-	expectedDirectories["/bin"] = &castorev1pb.Directory{
-		Directories: []*castorev1pb.DirectoryNode{},
-		Files: []*castorev1pb.FileNode{
-			{
-				Name: []byte("arp"),
-				Digest: []byte{
-					0xfb, 0xc4, 0x61, 0x4a, 0x29, 0x27, 0x11, 0xcb, 0xcc, 0xe4, 0x99, 0x81, 0x9c, 0xf0, 0xa9, 0x17, 0xf7, 0xd0, 0x91, 0xbe, 0xea, 0x08, 0xcb, 0x5b, 0xaa, 0x76, 0x76, 0xf5, 0xee, 0x4f, 0x82, 0xbb,
-				},
-				Size:       55288,
-				Executable: true,
-			},
-			{
-				Name: []byte("hostname"),
-				Digest: []byte{
-					0x9c, 0x6a, 0xe4, 0xb5, 0xe4, 0x6c, 0xb5, 0x67, 0x45, 0x0e, 0xaa, 0x2a, 0xd8, 0xdd, 0x9b, 0x38, 0xd7, 0xed, 0x01, 0x02, 0x84, 0xf7, 0x26, 0xe1, 0xc7, 0xf3, 0x1c, 0xeb, 0xaa, 0x8a, 0x01, 0x30,
-				},
-				Size:       17704,
-				Executable: true,
-			},
-			{
-				Name: []byte("ifconfig"),
-				Digest: []byte{
-					0x25, 0xbe, 0x3b, 0x1d, 0xf4, 0x1a, 0x45, 0x42, 0x79, 0x09, 0x2c, 0x2a, 0x83, 0xf0, 0x0b, 0xff, 0xe8, 0xc0, 0x9c, 0x26, 0x98, 0x70, 0x15, 0x4d, 0xa8, 0xca, 0x05, 0xfe, 0x92, 0x68, 0x35, 0x2e,
-				},
-				Size:       72576,
-				Executable: true,
-			},
-			{
-				Name: []byte("nameif"),
-				Digest: []byte{
-					0x8e, 0xaa, 0xc5, 0xdb, 0x71, 0x08, 0x8e, 0xe5, 0xe6, 0x30, 0x1f, 0x2c, 0x3a, 0xf2, 0x42, 0x39, 0x0c, 0x57, 0x15, 0xaf, 0x50, 0xaa, 0x1c, 0xdf, 0x84, 0x22, 0x08, 0x77, 0x03, 0x54, 0x62, 0xb1,
-				},
-				Size:       18776,
-				Executable: true,
-			},
-			{
-				Name: []byte("netstat"),
-				Digest: []byte{
-					0x13, 0x34, 0x7e, 0xdd, 0x2a, 0x9a, 0x17, 0x0b, 0x3f, 0xc7, 0x0a, 0xe4, 0x92, 0x89, 0x25, 0x9f, 0xaa, 0xb5, 0x05, 0x6b, 0x24, 0xa7, 0x91, 0xeb, 0xaf, 0xf9, 0xe9, 0x35, 0x56, 0xaa, 0x2f, 0xb2,
-				},
-				Size:       131784,
-				Executable: true,
-			},
-			{
-				Name: []byte("plipconfig"),
-				Digest: []byte{
-					0x19, 0x7c, 0x80, 0xdc, 0x81, 0xdc, 0xb4, 0xc0, 0x45, 0xe1, 0xf9, 0x76, 0x51, 0x4f, 0x50, 0xbf, 0xa4, 0x69, 0x51, 0x9a, 0xd4, 0xa9, 0xe7, 0xaa, 0xe7, 0x0d, 0x53, 0x32, 0xff, 0x28, 0x40, 0x60,
-				},
-				Size:       13160,
-				Executable: true,
-			},
-			{
-				Name: []byte("rarp"),
-				Digest: []byte{
-					0x08, 0x85, 0xb4, 0x85, 0x03, 0x2b, 0x3c, 0x7a, 0x3e, 0x24, 0x4c, 0xf8, 0xcc, 0x45, 0x01, 0x9e, 0x79, 0x43, 0x8c, 0x6f, 0x5e, 0x32, 0x46, 0x54, 0xb6, 0x68, 0x91, 0x8e, 0xa0, 0xcb, 0x6e, 0x0d,
-				},
-				Size:       30384,
-				Executable: true,
-			},
-			{
-				Name: []byte("route"),
-				Digest: []byte{
-					0x4d, 0x14, 0x20, 0x89, 0x9e, 0x76, 0xf4, 0xe2, 0x92, 0x53, 0xee, 0x9b, 0x78, 0x7d, 0x23, 0x80, 0x6c, 0xff, 0xe6, 0x33, 0xdc, 0x4a, 0x10, 0x29, 0x39, 0x02, 0xa0, 0x60, 0xff, 0xe2, 0xbb, 0xd7,
-				},
-				Size:       61928,
-				Executable: true,
-			},
-			{
-				Name: []byte("slattach"),
-				Digest: []byte{
-					0xfb, 0x25, 0xc3, 0x73, 0xb7, 0xb1, 0x0b, 0x25, 0xcd, 0x7b, 0x62, 0xf6, 0x71, 0x83, 0xfe, 0x36, 0x80, 0xf6, 0x48, 0xc3, 0xdb, 0xd8, 0x0c, 0xfe, 0xb8, 0xd3, 0xda, 0x32, 0x9b, 0x47, 0x4b, 0x05,
-				},
-				Size:       35672,
-				Executable: true,
-			},
-		},
-		Symlinks: []*castorev1pb.SymlinkNode{
-			{
-				Name:   []byte("dnsdomainname"),
-				Target: []byte("hostname"),
-			},
-			{
-				Name:   []byte("domainname"),
-				Target: []byte("hostname"),
-			},
-			{
-				Name:   []byte("nisdomainname"),
-				Target: []byte("hostname"),
-			},
-			{
-				Name:   []byte("ypdomainname"),
-				Target: []byte("hostname"),
-			},
-		},
-	}
-
-	// /share/man/man1 is a leaf directory.
-	// The parser traversed over /sbin, but only added it to /, which is still on the stack.
-	expectedDirectories["/share/man/man1"] = &castorev1pb.Directory{
-		Directories: []*castorev1pb.DirectoryNode{},
-		Files: []*castorev1pb.FileNode{
-			{
-				Name: []byte("dnsdomainname.1.gz"),
-				Digest: []byte{
-					0x98, 0x8a, 0xbd, 0xfa, 0x64, 0xd5, 0xb9, 0x27, 0xfe, 0x37, 0x43, 0x56, 0xb3, 0x18, 0xc7, 0x2b, 0xcb, 0xe3, 0x17, 0x1c, 0x17, 0xf4, 0x17, 0xeb, 0x4a, 0xa4, 0x99, 0x64, 0x39, 0xca, 0x2d, 0xee,
-				},
-				Size:       40,
-				Executable: false,
-			},
-			{
-				Name: []byte("domainname.1.gz"),
-				Digest: []byte{
-					0x98, 0x8a, 0xbd, 0xfa, 0x64, 0xd5, 0xb9, 0x27, 0xfe, 0x37, 0x43, 0x56, 0xb3, 0x18, 0xc7, 0x2b, 0xcb, 0xe3, 0x17, 0x1c, 0x17, 0xf4, 0x17, 0xeb, 0x4a, 0xa4, 0x99, 0x64, 0x39, 0xca, 0x2d, 0xee,
-				},
-				Size:       40,
-				Executable: false,
-			},
-			{
-				Name: []byte("hostname.1.gz"),
-				Digest: []byte{
-					0xbf, 0x89, 0xe6, 0x28, 0x00, 0x24, 0x66, 0x79, 0x70, 0x04, 0x38, 0xd6, 0xdd, 0x9d, 0xf6, 0x0e, 0x0d, 0xee, 0x00, 0xf7, 0x64, 0x4f, 0x05, 0x08, 0x9d, 0xf0, 0x36, 0xde, 0x85, 0xf4, 0x75, 0xdb,
-				},
-				Size:       1660,
-				Executable: false,
-			},
-			{
-				Name: []byte("nisdomainname.1.gz"),
-				Digest: []byte{
-					0x98, 0x8a, 0xbd, 0xfa, 0x64, 0xd5, 0xb9, 0x27, 0xfe, 0x37, 0x43, 0x56, 0xb3, 0x18, 0xc7, 0x2b, 0xcb, 0xe3, 0x17, 0x1c, 0x17, 0xf4, 0x17, 0xeb, 0x4a, 0xa4, 0x99, 0x64, 0x39, 0xca, 0x2d, 0xee,
-				},
-				Size:       40,
-				Executable: false,
-			},
-			{
-				Name: []byte("ypdomainname.1.gz"),
-				Digest: []byte{
-					0x98, 0x8a, 0xbd, 0xfa, 0x64, 0xd5, 0xb9, 0x27, 0xfe, 0x37, 0x43, 0x56, 0xb3, 0x18, 0xc7, 0x2b, 0xcb, 0xe3, 0x17, 0x1c, 0x17, 0xf4, 0x17, 0xeb, 0x4a, 0xa4, 0x99, 0x64, 0x39, 0xca, 0x2d, 0xee,
-				},
-				Size:       40,
-				Executable: false,
-			},
-		},
-		Symlinks: []*castorev1pb.SymlinkNode{},
-	}
-
-	// /share/man/man5 is a leaf directory
-	expectedDirectories["/share/man/man5"] = &castorev1pb.Directory{
-		Directories: []*castorev1pb.DirectoryNode{},
-		Files: []*castorev1pb.FileNode{
-			{
-				Name: []byte("ethers.5.gz"),
-				Digest: []byte{
-					0x42, 0x63, 0x8c, 0xc4, 0x18, 0x93, 0xcf, 0x60, 0xd6, 0xff, 0x43, 0xbc, 0x16, 0xb4, 0xfd, 0x22, 0xd2, 0xf2, 0x05, 0x0b, 0x52, 0xdc, 0x6a, 0x6b, 0xff, 0x34, 0xe2, 0x6a, 0x38, 0x3a, 0x07, 0xe3,
-				},
-				Size:       563,
-				Executable: false,
-			},
-		},
-		Symlinks: []*castorev1pb.SymlinkNode{},
-	}
-
-	// /share/man/man8 is a leaf directory
-	expectedDirectories["/share/man/man8"] = &castorev1pb.Directory{
-		Directories: []*castorev1pb.DirectoryNode{},
-		Files: []*castorev1pb.FileNode{
-			{
-				Name: []byte("arp.8.gz"),
-				Digest: []byte{
-					0xf5, 0x35, 0x4e, 0xf5, 0xf6, 0x44, 0xf7, 0x52, 0x0f, 0x42, 0xa0, 0x26, 0x51, 0xd9, 0x89, 0xf9, 0x68, 0xf2, 0xef, 0xeb, 0xba, 0xe1, 0xf4, 0x55, 0x01, 0x57, 0x77, 0xb7, 0x68, 0x55, 0x92, 0xef,
-				},
-				Size:       2464,
-				Executable: false,
-			},
-			{
-				Name: []byte("ifconfig.8.gz"),
-				Digest: []byte{
-					0x18, 0x65, 0x25, 0x11, 0x32, 0xee, 0x77, 0x91, 0x35, 0x4c, 0x3c, 0x24, 0xdb, 0xaf, 0x66, 0xdb, 0xfc, 0x17, 0x7b, 0xba, 0xe1, 0x3d, 0x05, 0xd2, 0xca, 0x6e, 0x2c, 0xe4, 0xef, 0xb8, 0xa8, 0xbe,
-				},
-				Size:       3382,
-				Executable: false,
-			},
-			{
-				Name: []byte("nameif.8.gz"),
-				Digest: []byte{
-					0x73, 0xc1, 0x27, 0xe8, 0x3b, 0xa8, 0x49, 0xdc, 0x0e, 0xdf, 0x70, 0x5f, 0xaf, 0x06, 0x01, 0x2c, 0x62, 0xe9, 0x18, 0x67, 0x01, 0x94, 0x64, 0x26, 0xca, 0x95, 0x22, 0xc0, 0xdc, 0xe4, 0x42, 0xb6,
-				},
-				Size:       523,
-				Executable: false,
-			},
-			{
-				Name: []byte("netstat.8.gz"),
-				Digest: []byte{
-					0xc0, 0x86, 0x43, 0x4a, 0x43, 0x57, 0xaa, 0x84, 0xa7, 0x24, 0xa0, 0x7c, 0x65, 0x38, 0x46, 0x1c, 0xf2, 0x45, 0xa2, 0xef, 0x12, 0x44, 0x18, 0xba, 0x52, 0x56, 0xe9, 0x8e, 0x6a, 0x0f, 0x70, 0x63,
-				},
-				Size:       4284,
-				Executable: false,
-			},
-			{
-				Name: []byte("plipconfig.8.gz"),
-				Digest: []byte{
-					0x2a, 0xd9, 0x1d, 0xa8, 0x9e, 0x0d, 0x05, 0xd0, 0xb0, 0x49, 0xaa, 0x64, 0xba, 0x29, 0x28, 0xc6, 0x45, 0xe1, 0xbb, 0x5e, 0x72, 0x8d, 0x48, 0x7b, 0x09, 0x4f, 0x0a, 0x82, 0x1e, 0x26, 0x83, 0xab,
-				},
-				Size:       889,
-				Executable: false,
-			},
-			{
-				Name: []byte("rarp.8.gz"),
-				Digest: []byte{
-					0x3d, 0x51, 0xc1, 0xd0, 0x6a, 0x59, 0x1e, 0x6d, 0x9a, 0xf5, 0x06, 0xd2, 0xe7, 0x7d, 0x7d, 0xd0, 0x70, 0x3d, 0x84, 0x64, 0xc3, 0x7d, 0xfb, 0x10, 0x84, 0x3b, 0xe1, 0xa9, 0xdf, 0x46, 0xee, 0x9f,
-				},
-				Size:       1198,
-				Executable: false,
-			},
-			{
-				Name: []byte("route.8.gz"),
-				Digest: []byte{
-					0x2a, 0x5a, 0x4b, 0x4f, 0x91, 0xf2, 0x78, 0xe4, 0xa9, 0x25, 0xb2, 0x7f, 0xa7, 0x2a, 0xc0, 0x8a, 0x4a, 0x65, 0xc9, 0x5f, 0x07, 0xa0, 0x48, 0x44, 0xeb, 0x46, 0xf9, 0xc9, 0xe1, 0x17, 0x96, 0x21,
-				},
-				Size:       3525,
-				Executable: false,
-			},
-			{
-				Name: []byte("slattach.8.gz"),
-				Digest: []byte{
-					0x3f, 0x05, 0x6b, 0x20, 0xe1, 0xe4, 0xf0, 0xba, 0x16, 0x15, 0x66, 0x6b, 0x57, 0x96, 0xe9, 0x9d, 0x83, 0xa8, 0x20, 0xaf, 0x8a, 0xca, 0x16, 0x4d, 0xa2, 0x6d, 0x94, 0x8e, 0xca, 0x91, 0x8f, 0xd4,
-				},
-				Size:       1441,
-				Executable: false,
-			},
-		},
-		Symlinks: []*castorev1pb.SymlinkNode{},
-	}
-
-	// /share/man holds /share/man/man{1,5,8}.
-	expectedDirectories["/share/man"] = &castorev1pb.Directory{
-		Directories: []*castorev1pb.DirectoryNode{
-			{
-				Name:   []byte("man1"),
-				Digest: mustDirectoryDigest(expectedDirectories["/share/man/man1"]),
-				Size:   expectedDirectories["/share/man/man1"].Size(),
-			},
-			{
-				Name:   []byte("man5"),
-				Digest: mustDirectoryDigest(expectedDirectories["/share/man/man5"]),
-				Size:   expectedDirectories["/share/man/man5"].Size(),
-			},
-			{
-				Name:   []byte("man8"),
-				Digest: mustDirectoryDigest(expectedDirectories["/share/man/man8"]),
-				Size:   expectedDirectories["/share/man/man8"].Size(),
-			},
-		},
-		Files:    []*castorev1pb.FileNode{},
-		Symlinks: []*castorev1pb.SymlinkNode{},
-	}
-
-	// /share holds /share/man.
-	expectedDirectories["/share"] = &castorev1pb.Directory{
-		Directories: []*castorev1pb.DirectoryNode{
-			{
-				Name:   []byte("man"),
-				Digest: mustDirectoryDigest(expectedDirectories["/share/man"]),
-				Size:   expectedDirectories["/share/man"].Size(),
-			},
-		},
-		Files:    []*castorev1pb.FileNode{},
-		Symlinks: []*castorev1pb.SymlinkNode{},
-	}
-
-	// / holds /bin, /share, and a /sbin symlink.
-	expectedDirectories["/"] = &castorev1pb.Directory{
-		Directories: []*castorev1pb.DirectoryNode{
-			{
-				Name:   []byte("bin"),
-				Digest: mustDirectoryDigest(expectedDirectories["/bin"]),
-				Size:   expectedDirectories["/bin"].Size(),
-			},
-			{
-				Name:   []byte("share"),
-				Digest: mustDirectoryDigest(expectedDirectories["/share"]),
-				Size:   expectedDirectories["/share"].Size(),
-			},
-		},
-		Files: []*castorev1pb.FileNode{},
-		Symlinks: []*castorev1pb.SymlinkNode{
-			{
-				Name:   []byte("sbin"),
-				Target: []byte("bin"),
-			},
-		},
-	}
-	// assert we populated the two fixtures properly
-	require.Equal(t, len(expectedDirectoryPaths), len(expectedDirectories))
-
-	numDirectoriesReceived := 0
-
-	rootNode, narSize, narSha256, err := importer.Import(
-		context.Background(),
-		f,
-		func(blobReader io.Reader) ([]byte, error) {
-			// Don't really bother reading and comparing the contents here;
-			// we already verify the right digests are produced by comparing the
-			// directoryCb calls, and TestRegular ensures the reader works.
-			return mustBlobDigest(blobReader), nil
-		}, func(directory *castorev1pb.Directory) ([]byte, error) {
-			// use numDirectoriesReceived to look up the Directory object we expect at this specific invocation.
-			currentDirectoryPath := expectedDirectoryPaths[numDirectoriesReceived]
-
-			expectedDirectory, found := expectedDirectories[currentDirectoryPath]
-			require.True(t, found, "must find the current directory")
-
-			requireProtoEq(t, expectedDirectory, directory)
-
-			numDirectoriesReceived += 1
-			return mustDirectoryDigest(directory), nil
-		},
-	)
-	require.NoError(t, err)
-	require.Equal(t, &castorev1pb.Node{
-		Node: &castorev1pb.Node_Directory{
-			Directory: &castorev1pb.DirectoryNode{
-				Name:   []byte(""),
-				Digest: mustDirectoryDigest(expectedDirectories["/"]),
-				Size:   expectedDirectories["/"].Size(),
-			},
-		},
-	}, rootNode)
-	require.Equal(t, []byte{
-		0xc6, 0xe1, 0x55, 0xb3, 0x45, 0x6e, 0x30, 0xb7, 0x61, 0x22, 0x63, 0xec, 0x09, 0x50, 0x70, 0x81, 0x1c, 0xaf, 0x8a, 0xbf, 0xd5, 0x9f, 0xaa, 0x72, 0xab, 0x82, 0xa5, 0x92, 0xef, 0xde, 0xb2, 0x53,
-	}, narSha256)
-	require.Equal(t, uint64(464152), narSize)
-}
-
-// TestCallbackErrors ensures that errors returned from the callback function
-// bubble up to the importer process, and are not ignored.
-func TestCallbackErrors(t *testing.T) {
-	t.Run("callback blob", func(t *testing.T) {
-		// Pick an example NAR with a regular file.
-		f, err := os.Open("../../testdata/onebyteregular.nar")
-		require.NoError(t, err)
-
-		targetErr := errors.New("expected error")
-
-		_, _, _, err = importer.Import(
-			context.Background(),
-			f,
-			func(blobReader io.Reader) ([]byte, error) {
-				return nil, targetErr
-			}, func(directory *castorev1pb.Directory) ([]byte, error) {
-				panic("no directories expected!")
-			},
-		)
-		require.ErrorIs(t, err, targetErr)
-	})
-	t.Run("callback directory", func(t *testing.T) {
-		// Pick an example NAR with a directory node
-		f, err := os.Open("../../testdata/emptydirectory.nar")
-		require.NoError(t, err)
-
-		targetErr := errors.New("expected error")
-
-		_, _, _, err = importer.Import(
-			context.Background(),
-			f,
-			func(blobReader io.Reader) ([]byte, error) {
-				panic("no file contents expected!")
-			}, func(directory *castorev1pb.Directory) ([]byte, error) {
-				return nil, targetErr
-			},
-		)
-		require.ErrorIs(t, err, targetErr)
-	})
-}
-
-// TestPopDirectories is a regression test that ensures we handle the directory
-// stack properly.
-//
-// This test case looks like:
-//
-// / (dir)
-// /test (dir)
-// /test/tested (file)
-// /tested (file)
-//
-// We used to have a bug where the second `tested` file would appear as if
-// it was in the `/test` dir because it has that dir as a string prefix.
-func TestPopDirectories(t *testing.T) {
-	f, err := os.Open("../../testdata/popdirectories.nar")
-	require.NoError(t, err)
-	defer f.Close()
-
-	_, _, _, err = importer.Import(
-		context.Background(),
-		f,
-		func(blobReader io.Reader) ([]byte, error) { return mustBlobDigest(blobReader), nil },
-		func(directory *castorev1pb.Directory) ([]byte, error) {
-			require.NoError(t, directory.Validate(), "directory validation shouldn't error")
-			return mustDirectoryDigest(directory), nil
-		},
-	)
-	require.NoError(t, err)
-}
diff --git a/tvix/nar-bridge-go/pkg/importer/roundtrip_test.go b/tvix/nar-bridge-go/pkg/importer/roundtrip_test.go
deleted file mode 100644
index c50d332d85dc..000000000000
--- a/tvix/nar-bridge-go/pkg/importer/roundtrip_test.go
+++ /dev/null
@@ -1,85 +0,0 @@
-package importer_test
-
-import (
-	"bytes"
-	"context"
-	"encoding/base64"
-	"fmt"
-	"io"
-	"os"
-	"sync"
-	"testing"
-
-	castorev1pb "code.tvl.fyi/tvix/castore-go"
-	"code.tvl.fyi/tvix/nar-bridge-go/pkg/importer"
-	storev1pb "code.tvl.fyi/tvix/store-go"
-	"github.com/stretchr/testify/require"
-)
-
-func TestRoundtrip(t *testing.T) {
-	// We pipe nar_1094wph9z4nwlgvsd53abfz8i117ykiv5dwnq9nnhz846s7xqd7d.nar through
-	// importer.Import, and store all the file contents and directory objects
-	// received in two maps.
-	// We then feed these to storev1pb.Export, and test we come up with the same NAR file.
-
-	f, err := os.Open("../../testdata/nar_1094wph9z4nwlgvsd53abfz8i117ykiv5dwnq9nnhz846s7xqd7d.nar")
-	require.NoError(t, err)
-
-	narContents, err := io.ReadAll(f)
-	require.NoError(t, err)
-
-	var mu sync.Mutex
-	blobsMap := make(map[string][]byte, 0)
-	directoriesMap := make(map[string]*castorev1pb.Directory)
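-	// Only blobsMap is guarded by the mutex: blob callbacks may be invoked
-	// concurrently (for small blobs uploaded asynchronously), while directory
-	// callbacks are invoked sequentially.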
-
-	rootNode, _, _, err := importer.Import(
-		context.Background(),
-		bytes.NewBuffer(narContents),
-		func(blobReader io.Reader) ([]byte, error) {
-			// read in contents, we need to put it into blobsMap later.
-			contents, err := io.ReadAll(blobReader)
-			require.NoError(t, err)
-
-			dgst := mustBlobDigest(bytes.NewReader(contents))
-
-			// put it in filesMap
-			mu.Lock()
-			blobsMap[base64.StdEncoding.EncodeToString(dgst)] = contents
-			mu.Unlock()
-
-			return dgst, nil
-		},
-		func(directory *castorev1pb.Directory) ([]byte, error) {
-			dgst := mustDirectoryDigest(directory)
-
-			directoriesMap[base64.StdEncoding.EncodeToString(dgst)] = directory
-			return dgst, nil
-		},
-	)
-
-	require.NoError(t, err)
-
-	// done populating everything, now actually test the export :-)
-	var narBuf bytes.Buffer
-	err = storev1pb.Export(
-		&narBuf,
-		rootNode,
-		func(directoryDgst []byte) (*castorev1pb.Directory, error) {
-			d, found := directoriesMap[base64.StdEncoding.EncodeToString(directoryDgst)]
-			if !found {
-				panic(fmt.Sprintf("directory %v not found", base64.StdEncoding.EncodeToString(directoryDgst)))
-			}
-			return d, nil
-		},
-		func(blobDgst []byte) (io.ReadCloser, error) {
-			blobContents, found := blobsMap[base64.StdEncoding.EncodeToString(blobDgst)]
-			if !found {
-				panic(fmt.Sprintf("blob      %v not found", base64.StdEncoding.EncodeToString(blobDgst)))
-			}
-			return io.NopCloser(bytes.NewReader(blobContents)), nil
-		},
-	)
-
-	require.NoError(t, err, "exporter shouldn't fail")
-	require.Equal(t, narContents, narBuf.Bytes())
-}
diff --git a/tvix/nar-bridge-go/pkg/importer/util_test.go b/tvix/nar-bridge-go/pkg/importer/util_test.go
deleted file mode 100644
index 06353cf582e5..000000000000
--- a/tvix/nar-bridge-go/pkg/importer/util_test.go
+++ /dev/null
@@ -1,34 +0,0 @@
-package importer_test
-
-import (
-	"io"
-	"testing"
-
-	castorev1pb "code.tvl.fyi/tvix/castore-go"
-	"github.com/google/go-cmp/cmp"
-	"google.golang.org/protobuf/testing/protocmp"
-	"lukechampine.com/blake3"
-)
-
-func requireProtoEq(t *testing.T, expected interface{}, actual interface{}) {
-	if diff := cmp.Diff(expected, actual, protocmp.Transform()); diff != "" {
-		t.Errorf("unexpected difference:\n%v", diff)
-	}
-}
-
-func mustDirectoryDigest(d *castorev1pb.Directory) []byte {
-	dgst, err := d.Digest()
-	if err != nil {
-		panic(err)
-	}
-	return dgst
-}
-
-func mustBlobDigest(r io.Reader) []byte {
-	hasher := blake3.New(32, nil)
-	_, err := io.Copy(hasher, r)
-	if err != nil {
-		panic(err)
-	}
-	return hasher.Sum([]byte{})
-}
diff --git a/tvix/nar-bridge-go/testdata/emptydirectory.nar b/tvix/nar-bridge-go/testdata/emptydirectory.nar
deleted file mode 100644
index baba55862255..000000000000
--- a/tvix/nar-bridge-go/testdata/emptydirectory.nar
+++ /dev/null
Binary files differ
diff --git a/tvix/nar-bridge-go/testdata/nar_1094wph9z4nwlgvsd53abfz8i117ykiv5dwnq9nnhz846s7xqd7d.nar b/tvix/nar-bridge-go/testdata/nar_1094wph9z4nwlgvsd53abfz8i117ykiv5dwnq9nnhz846s7xqd7d.nar
deleted file mode 100644
index 6cb0b16e5d5d..000000000000
--- a/tvix/nar-bridge-go/testdata/nar_1094wph9z4nwlgvsd53abfz8i117ykiv5dwnq9nnhz846s7xqd7d.nar
+++ /dev/null
Binary files differ
diff --git a/tvix/nar-bridge-go/testdata/onebyteexecutable.nar b/tvix/nar-bridge-go/testdata/onebyteexecutable.nar
deleted file mode 100644
index 68682196665c..000000000000
--- a/tvix/nar-bridge-go/testdata/onebyteexecutable.nar
+++ /dev/null
Binary files differ
diff --git a/tvix/nar-bridge-go/testdata/onebyteregular.nar b/tvix/nar-bridge-go/testdata/onebyteregular.nar
deleted file mode 100644
index b8c94932bf0c..000000000000
--- a/tvix/nar-bridge-go/testdata/onebyteregular.nar
+++ /dev/null
Binary files differ
diff --git a/tvix/nar-bridge-go/testdata/popdirectories.nar b/tvix/nar-bridge-go/testdata/popdirectories.nar
deleted file mode 100644
index 74313aca529f..000000000000
--- a/tvix/nar-bridge-go/testdata/popdirectories.nar
+++ /dev/null
Binary files differ
diff --git a/tvix/nar-bridge-go/testdata/symlink.nar b/tvix/nar-bridge-go/testdata/symlink.nar
deleted file mode 100644
index 7990e4ad5bc2..000000000000
--- a/tvix/nar-bridge-go/testdata/symlink.nar
+++ /dev/null
Binary files differ
diff --git a/tvix/nar-bridge/Cargo.toml b/tvix/nar-bridge/Cargo.toml
new file mode 100644
index 000000000000..6ca0479a9a81
--- /dev/null
+++ b/tvix/nar-bridge/Cargo.toml
@@ -0,0 +1,46 @@
+[package]
+name = "nar-bridge"
+version = "0.1.0"
+edition = "2021"
+
+[dependencies]
+axum = { workspace = true, features = ["http2"] }
+tower = { workspace = true }
+tower-http = { workspace = true, features = ["trace"] }
+bytes = { workspace = true }
+clap = { workspace = true, features = ["derive", "env"] }
+data-encoding = { workspace = true }
+futures = { workspace = true }
+itertools = { workspace = true }
+prost = { workspace = true }
+nix-compat = { path = "../nix-compat", features = ["async"] }
+thiserror = { workspace = true }
+tokio = { workspace = true }
+tokio-listener = { workspace = true, features = ["axum07", "clap", "multi-listener", "sd_listen"] }
+tokio-util = { workspace = true, features = ["io", "io-util", "compat"] }
+tonic = { workspace = true, features = ["tls", "tls-roots"] }
+tvix-castore = { path = "../castore" }
+tvix-store = { path = "../store" }
+tvix-tracing = { path = "../tracing", features = ["tonic", "axum"] }
+tracing = { workspace = true }
+tracing-subscriber = { workspace = true }
+url = { workspace = true }
+serde = { workspace = true, features = ["derive"] }
+lru = { workspace = true }
+parking_lot = { workspace = true }
+mimalloc = { workspace = true }
+
+[build-dependencies]
+prost-build = { workspace = true }
+tonic-build = { workspace = true }
+
+[features]
+default = ["otlp"]
+otlp = ["tvix-tracing/otlp"]
+
+[dev-dependencies]
+hex-literal = { workspace = true }
+rstest = { workspace = true }
+
+[lints]
+workspace = true
diff --git a/tvix/nar-bridge/default.nix b/tvix/nar-bridge/default.nix
new file mode 100644
index 000000000000..2f1384e8211f
--- /dev/null
+++ b/tvix/nar-bridge/default.nix
@@ -0,0 +1,11 @@
+{ depot, lib, ... }:
+
+(depot.tvix.crates.workspaceMembers.nar-bridge.build.override {
+  runTests = true;
+}).overrideAttrs (old: rec {
+  meta.ci.targets = lib.filter (x: lib.hasPrefix "with-features" x || x == "no-features") (lib.attrNames passthru);
+  passthru = old.passthru // (depot.tvix.utils.mkFeaturePowerset {
+    inherit (old) crateName;
+    features = [ "otlp" ];
+  });
+})
diff --git a/tvix/nar-bridge/src/bin/nar-bridge.rs b/tvix/nar-bridge/src/bin/nar-bridge.rs
new file mode 100644
index 000000000000..48eba0ac44ac
--- /dev/null
+++ b/tvix/nar-bridge/src/bin/nar-bridge.rs
@@ -0,0 +1,92 @@
+use clap::Parser;
+use mimalloc::MiMalloc;
+use nar_bridge::AppState;
+use tower::ServiceBuilder;
+use tower_http::trace::{DefaultMakeSpan, TraceLayer};
+use tracing::info;
+use tvix_store::utils::ServiceUrlsGrpc;
+
+#[global_allocator]
+static GLOBAL: MiMalloc = MiMalloc;
+
+/// Expose the Nix HTTP Binary Cache protocol for a tvix-store.
+#[derive(Parser)]
+#[command(author, version, about, long_about = None)]
+struct Cli {
+    #[clap(flatten)]
+    service_addrs: ServiceUrlsGrpc,
+
+    /// The priority to announce at the `nix-cache-info` endpoint.
+    /// A lower number means it's more preferred.
+    #[arg(long, env, default_value_t = 39)]
+    priority: u64,
+
+    /// The address to listen on.
+    #[clap(flatten)]
+    listen_args: tokio_listener::ListenerAddressLFlag,
+
+    #[cfg(feature = "otlp")]
+    /// Whether to configure OTLP. Set --otlp=false to disable.
+    #[arg(long, default_missing_value = "true", default_value = "true", num_args(0..=1), require_equals(true), action(clap::ArgAction::Set))]
+    otlp: bool,
+}
+
+#[tokio::main]
+async fn main() -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
+    let cli = Cli::parse();
+
+    let _tracing_handle = {
+        #[allow(unused_mut)]
+        let mut builder = tvix_tracing::TracingBuilder::default();
+        #[cfg(feature = "otlp")]
+        {
+            if cli.otlp {
+                builder = builder.enable_otlp("tvix.store");
+            }
+        }
+        builder.build()?
+    };
+
+    // initialize stores
+    let (blob_service, directory_service, path_info_service, _nar_calculation_service) =
+        tvix_store::utils::construct_services(cli.service_addrs).await?;
+
+    let state = AppState::new(blob_service, directory_service, path_info_service);
+
+    let app = nar_bridge::gen_router(cli.priority)
+        .layer(
+            ServiceBuilder::new()
+                .layer(
+                    TraceLayer::new_for_http().make_span_with(
+                        DefaultMakeSpan::new()
+                            .level(tracing::Level::INFO)
+                            .include_headers(true),
+                    ),
+                )
+                .map_request(tvix_tracing::propagate::axum::accept_trace),
+        )
+        .with_state(state);
+
+    let listen_address = &cli.listen_args.listen_address.unwrap_or_else(|| {
+        "[::]:9000"
+            .parse()
+            .expect("invalid fallback listen address")
+    });
+
+    let listener = tokio_listener::Listener::bind(
+        listen_address,
+        &Default::default(),
+        &cli.listen_args.listener_options,
+    )
+    .await?;
+
+    info!(listen_address=%listen_address, "starting daemon");
+
+    tokio_listener::axum07::serve(
+        listener,
+        app.into_make_service_with_connect_info::<tokio_listener::SomeSocketAddrClonable>(),
+    )
+    .await?;
+
+    Ok(())
+}
diff --git a/tvix/nar-bridge/src/lib.rs b/tvix/nar-bridge/src/lib.rs
new file mode 100644
index 000000000000..79f9b7372245
--- /dev/null
+++ b/tvix/nar-bridge/src/lib.rs
@@ -0,0 +1,84 @@
+use axum::http::StatusCode;
+use axum::response::IntoResponse;
+use axum::routing::{head, put};
+use axum::{routing::get, Router};
+use lru::LruCache;
+use nix_compat::nix_http;
+use parking_lot::RwLock;
+use std::num::NonZeroUsize;
+use std::sync::Arc;
+use tvix_castore::blobservice::BlobService;
+use tvix_castore::directoryservice::DirectoryService;
+use tvix_castore::Node;
+use tvix_store::pathinfoservice::PathInfoService;
+
+mod nar;
+mod narinfo;
+
+/// The capacity of the lookup table from NarHash to [Node].
+/// Should be bigger than the number of concurrent NAR uploads.
+/// Cannot be [NonZeroUsize] here due to rust-analyzer going bananas.
+/// SAFETY: 1000 != 0
+const ROOT_NODES_CACHE_CAPACITY: usize = 1000;
+
+#[derive(Clone)]
+pub struct AppState {
+    blob_service: Arc<dyn BlobService>,
+    directory_service: Arc<dyn DirectoryService>,
+    path_info_service: Arc<dyn PathInfoService>,
+
+    /// Lookup table from NarHash to [Node], necessary to populate the root_node
+    /// field of the PathInfo when processing the narinfo upload.
+    root_nodes: Arc<RwLock<LruCache<[u8; 32], Node>>>,
+}
+
+impl AppState {
+    pub fn new(
+        blob_service: Arc<dyn BlobService>,
+        directory_service: Arc<dyn DirectoryService>,
+        path_info_service: Arc<dyn PathInfoService>,
+    ) -> Self {
+        Self {
+            blob_service,
+            directory_service,
+            path_info_service,
+            root_nodes: Arc::new(RwLock::new(LruCache::new({
+                // SAFETY: 1000 != 0
+                unsafe { NonZeroUsize::new_unchecked(ROOT_NODES_CACHE_CAPACITY) }
+            }))),
+        }
+    }
+}
+
+pub fn gen_router(priority: u64) -> Router<AppState> {
+    Router::new()
+        .route("/", get(root))
+        // FUTUREWORK: respond for NARs that we still have in root_nodes (at least HEAD)
+        // This avoids some unnecessary NAR uploading from multiple concurrent clients, and is cheap.
+        .route("/nar/:nar_str", get(four_o_four))
+        .route("/nar/:nar_str", head(four_o_four))
+        .route("/nar/:nar_str", put(nar::put))
+        .route("/nar/tvix-castore/:root_node_enc", get(nar::get))
+        .route("/:narinfo_str", get(narinfo::get))
+        .route("/:narinfo_str", head(narinfo::head))
+        .route("/:narinfo_str", put(narinfo::put))
+        .route("/nix-cache-info", get(move || nix_cache_info(priority)))
+}
+
+async fn root() -> &'static str {
+    "Hello from nar-bridge"
+}
+
+async fn four_o_four() -> Result<(), StatusCode> {
+    Err(StatusCode::NOT_FOUND)
+}
+
+async fn nix_cache_info(priority: u64) -> impl IntoResponse {
+    (
+        [("Content-Type", nix_http::MIME_TYPE_CACHE_INFO)],
+        format!(
+            "StoreDir: /nix/store\nWantMassQuery: 1\nPriority: {}\n",
+            priority
+        ),
+    )
+}
diff --git a/tvix/nar-bridge/src/nar.rs b/tvix/nar-bridge/src/nar.rs
new file mode 100644
index 000000000000..70d9d644c26c
--- /dev/null
+++ b/tvix/nar-bridge/src/nar.rs
@@ -0,0 +1,143 @@
+use axum::body::Body;
+use axum::extract::Query;
+use axum::http::StatusCode;
+use axum::response::Response;
+use bytes::Bytes;
+use data_encoding::BASE64URL_NOPAD;
+use futures::TryStreamExt;
+use nix_compat::{nix_http, nixbase32};
+use serde::Deserialize;
+use std::io;
+use tokio_util::io::ReaderStream;
+use tracing::{instrument, warn, Span};
+use tvix_store::nar::ingest_nar_and_hash;
+
+use crate::AppState;
+
+#[derive(Debug, Deserialize)]
+pub(crate) struct GetNARParams {
+    #[serde(rename = "narsize")]
+    nar_size: u64,
+}
+
+#[instrument(skip(blob_service, directory_service))]
+pub async fn get(
+    axum::extract::Path(root_node_enc): axum::extract::Path<String>,
+    axum::extract::Query(GetNARParams { nar_size }): Query<GetNARParams>,
+    axum::extract::State(AppState {
+        blob_service,
+        directory_service,
+        ..
+    }): axum::extract::State<AppState>,
+) -> Result<Response, StatusCode> {
+    use prost::Message;
+    // b64decode the root node passed *by the user*
+    let root_node_proto = BASE64URL_NOPAD
+        .decode(root_node_enc.as_bytes())
+        .map_err(|e| {
+            warn!(err=%e, "unable to decode root node b64");
+            StatusCode::NOT_FOUND
+        })?;
+
+    // check the proto size to be somewhat reasonable before parsing it.
+    if root_node_proto.len() > 4096 {
+        warn!("rejected too large root node");
+        return Err(StatusCode::BAD_REQUEST);
+    }
+
+    // parse the proto
+    let root_node: tvix_castore::proto::Node = Message::decode(Bytes::from(root_node_proto))
+        .map_err(|e| {
+            warn!(err=%e, "unable to decode root node proto");
+            StatusCode::NOT_FOUND
+        })?;
+
+    let (root_name, root_node) = root_node.into_name_and_node().map_err(|e| {
+        warn!(err=%e, "root node validation failed");
+        StatusCode::BAD_REQUEST
+    })?;
+
+    if !root_name.as_ref().is_empty() {
+        warn!("root node has name, which it shouldn't");
+        return Err(StatusCode::BAD_REQUEST);
+    }
+
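+    // Create an in-memory pipe: a spawned task writes the NAR into `w`,
+    // while the HTTP response body streams out of `r`.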
+    let (w, r) = tokio::io::duplex(1024 * 8);
+
+    // spawn a task rendering the NAR to the client
+    tokio::spawn(async move {
+        if let Err(e) =
+            tvix_store::nar::write_nar(w, &root_node, blob_service, directory_service).await
+        {
+            warn!(err=%e, "failed to write out NAR");
+        }
+    });
+
+    Ok(Response::builder()
+        .status(StatusCode::OK)
+        .header("cache-control", "max-age=31536000, immutable")
+        .header("content-length", nar_size)
+        .header("content-type", nix_http::MIME_TYPE_NAR)
+        .body(Body::from_stream(ReaderStream::new(r)))
+        .unwrap())
+}
+
+#[instrument(skip(blob_service, directory_service, request))]
+pub async fn put(
+    axum::extract::Path(nar_str): axum::extract::Path<String>,
+    axum::extract::State(AppState {
+        blob_service,
+        directory_service,
+        root_nodes,
+        ..
+    }): axum::extract::State<AppState>,
+    request: axum::extract::Request,
+) -> Result<&'static str, StatusCode> {
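+    // The path component is expected to look like "<nixbase32 nar hash>.nar",
+    // possibly with a compression suffix (which we reject below).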
+    let (nar_hash_expected, compression_suffix) =
+        nix_http::parse_nar_str(&nar_str).ok_or(StatusCode::UNAUTHORIZED)?;
+
+    // No paths with compression suffix are supported.
+    if !compression_suffix.is_empty() {
+        warn!(%compression_suffix, "invalid compression suffix requested");
+        return Err(StatusCode::UNAUTHORIZED);
+    }
+
+    let s = request.into_body().into_data_stream();
+
+    let mut r = tokio_util::io::StreamReader::new(s.map_err(|e| {
+        warn!(err=%e, "failed to read request body");
+        io::Error::new(io::ErrorKind::BrokenPipe, e.to_string())
+    }));
+
+    // ingest the NAR
+    let (root_node, nar_hash_actual, nar_size) =
+        ingest_nar_and_hash(blob_service.clone(), directory_service.clone(), &mut r)
+            .await
+            .map_err(|e| io::Error::new(io::ErrorKind::Other, e))
+            .map_err(|e| {
+                warn!(err=%e, "failed to ingest nar");
+                StatusCode::INTERNAL_SERVER_ERROR
+            })?;
+
+    let s = Span::current();
+    s.record("nar_hash.expected", nixbase32::encode(&nar_hash_expected));
+    s.record("nar_size", nar_size);
+
+    if nar_hash_expected != nar_hash_actual {
+        warn!(
+            nar_hash.expected = nixbase32::encode(&nar_hash_expected),
+            nar_hash.actual = nixbase32::encode(&nar_hash_actual),
+            "nar hash mismatch"
+        );
+        return Err(StatusCode::BAD_REQUEST);
+    }
+
+    // store mapping of narhash to root node into root_nodes.
+    // we need it later to populate the root node when accepting the PathInfo.
+    root_nodes.write().put(nar_hash_actual, root_node);
+
+    Ok("")
+}
+
+// FUTUREWORK: maybe head by narhash. Though not too critical, as we do
+// implement HEAD for .narinfo.
diff --git a/tvix/nar-bridge/src/narinfo.rs b/tvix/nar-bridge/src/narinfo.rs
new file mode 100644
index 000000000000..fc90f0b86629
--- /dev/null
+++ b/tvix/nar-bridge/src/narinfo.rs
@@ -0,0 +1,162 @@
+use axum::{http::StatusCode, response::IntoResponse};
+use bytes::Bytes;
+use nix_compat::{narinfo::NarInfo, nix_http, nixbase32};
+use prost::Message;
+use tracing::{instrument, warn, Span};
+use tvix_castore::proto::{self as castorepb};
+use tvix_store::proto::PathInfo;
+
+use crate::AppState;
+
+/// The size limit for NARInfo uploads that nar-bridge receives
+const NARINFO_LIMIT: usize = 2 * 1024 * 1024;
+
+#[instrument(skip(path_info_service))]
+pub async fn head(
+    axum::extract::Path(narinfo_str): axum::extract::Path<String>,
+    axum::extract::State(AppState {
+        path_info_service, ..
+    }): axum::extract::State<AppState>,
+) -> Result<impl IntoResponse, StatusCode> {
+    let digest = nix_http::parse_narinfo_str(&narinfo_str).ok_or(StatusCode::NOT_FOUND)?;
+    Span::current().record("path_info.digest", &narinfo_str[0..32]);
+
+    if path_info_service
+        .get(digest)
+        .await
+        .map_err(|e| {
+            warn!(err=%e, "failed to get PathInfo");
+            StatusCode::INTERNAL_SERVER_ERROR
+        })?
+        .is_some()
+    {
+        Ok(([("content-type", nix_http::MIME_TYPE_NARINFO)], ""))
+    } else {
+        warn!("PathInfo not found");
+        Err(StatusCode::NOT_FOUND)
+    }
+}
+
+#[instrument(skip(path_info_service))]
+pub async fn get(
+    axum::extract::Path(narinfo_str): axum::extract::Path<String>,
+    axum::extract::State(AppState {
+        path_info_service, ..
+    }): axum::extract::State<AppState>,
+) -> Result<impl IntoResponse, StatusCode> {
+    let digest = nix_http::parse_narinfo_str(&narinfo_str).ok_or(StatusCode::NOT_FOUND)?;
+    Span::current().record("path_info.digest", &narinfo_str[0..32]);
+
+    // fetch the PathInfo
+    let path_info = path_info_service
+        .get(digest)
+        .await
+        .map_err(|e| {
+            warn!(err=%e, "failed to get PathInfo");
+            StatusCode::INTERNAL_SERVER_ERROR
+        })?
+        .ok_or(StatusCode::NOT_FOUND)?;
+
+    let store_path = path_info.validate().map_err(|e| {
+        warn!(err=%e, "invalid PathInfo");
+        StatusCode::INTERNAL_SERVER_ERROR
+    })?;
+
+    let mut narinfo = path_info.to_narinfo(store_path.as_ref()).ok_or_else(|| {
+        warn!(path_info=?path_info, "PathInfo contained no NAR data");
+        StatusCode::INTERNAL_SERVER_ERROR
+    })?;
+
+    // Encode the (unnamed) root node in the NAR url itself.
+    // We strip the name from the proto node before sending it out.
+    // The name isn't needed to render the NAR; stripping it makes the URL
+    // shorter and makes caching these requests easier.
+    let (_, root_node) = path_info
+        .node
+        .as_ref()
+        .expect("invalid pathinfo")
+        .to_owned()
+        .into_name_and_node()
+        .expect("invalid pathinfo");
+
+    let url = format!(
+        "nar/tvix-castore/{}?narsize={}",
+        data_encoding::BASE64URL_NOPAD
+            .encode(&castorepb::Node::from_name_and_node("".into(), root_node).encode_to_vec()),
+        narinfo.nar_size,
+    );
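+    // (Illustrative shape: "nar/tvix-castore/<base64url proto>?narsize=1234".)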
+
+    narinfo.url = &url;
+
+    Ok((
+        [("content-type", nix_http::MIME_TYPE_NARINFO)],
+        narinfo.to_string(),
+    ))
+}
+
+#[instrument(skip(path_info_service, root_nodes, request))]
+pub async fn put(
+    axum::extract::Path(narinfo_str): axum::extract::Path<String>,
+    axum::extract::State(AppState {
+        path_info_service,
+        root_nodes,
+        ..
+    }): axum::extract::State<AppState>,
+    request: axum::extract::Request,
+) -> Result<&'static str, StatusCode> {
+    let _narinfo_digest = nix_http::parse_narinfo_str(&narinfo_str).ok_or(StatusCode::UNAUTHORIZED)?;
+    Span::current().record("path_info.digest", &narinfo_str[0..32]);
+
+    let narinfo_bytes: Bytes = axum::body::to_bytes(request.into_body(), NARINFO_LIMIT)
+        .await
+        .map_err(|e| {
+            warn!(err=%e, "unable to fetch body");
+            StatusCode::BAD_REQUEST
+        })?;
+
+    // Parse the narinfo from the body.
+    let narinfo_str = std::str::from_utf8(narinfo_bytes.as_ref()).map_err(|e| {
+        warn!(err=%e, "unable decode body as string");
+        StatusCode::BAD_REQUEST
+    })?;
+
+    let narinfo = NarInfo::parse(narinfo_str).map_err(|e| {
+        warn!(err=%e, "unable to parse narinfo");
+        StatusCode::BAD_REQUEST
+    })?;
+
+    // Record the NarHash from the parsed narinfo in the current span.
+    Span::current().record("path_info.nar_info", nixbase32::encode(&narinfo.nar_hash));
+
+    // populate the pathinfo.
+    let mut pathinfo = PathInfo::from(&narinfo);
+
+    // Look up the root node with peek, as we don't want to update the LRU list.
+    // We need to be careful to not hold the RwLock across the await point.
+    let maybe_root_node: Option<tvix_castore::Node> =
+        root_nodes.read().peek(&narinfo.nar_hash).cloned();
+
+    match maybe_root_node {
+        Some(root_node) => {
+            // Set the root node from the lookup.
+            // We need to rename the node to the narinfo storepath basename, as
+            // that's where it's stored in PathInfo.
+            pathinfo.node = Some(castorepb::Node::from_name_and_node(
+                narinfo.store_path.to_string().into(),
+                root_node,
+            ));
+
+            // Persist the PathInfo.
+            path_info_service.put(pathinfo).await.map_err(|e| {
+                warn!(err=%e, "failed to persist the PathInfo");
+                StatusCode::INTERNAL_SERVER_ERROR
+            })?;
+
+            Ok("")
+        }
+        None => {
+            warn!("received narinfo with unknown NARHash");
+            Err(StatusCode::BAD_REQUEST)
+        }
+    }
+}
diff --git a/tvix/nix-compat-derive-tests/Cargo.toml b/tvix/nix-compat-derive-tests/Cargo.toml
new file mode 100644
index 000000000000..e69cb10e4fea
--- /dev/null
+++ b/tvix/nix-compat-derive-tests/Cargo.toml
@@ -0,0 +1,24 @@
+[package]
+name = "nix-compat-derive-tests"
+version = "0.1.0"
+edition = "2021"
+
+[features]
+compile-tests = []
+
+[dev-dependencies]
+hex-literal = { workspace = true }
+pretty_assertions = { workspace = true }
+rstest = { workspace = true }
+tokio-test = { workspace = true }
+trybuild = { workspace = true }
+tokio = { workspace = true, features = ["io-util", "macros"] }
+
+[dev-dependencies.nix-compat]
+version = "0.1.0"
+path = "../nix-compat"
+features = ["test", "wire"]
+
+[dev-dependencies.nix-compat-derive]
+version = "0.1.0"
+path = "../nix-compat-derive"
diff --git a/tvix/nix-compat-derive-tests/default.nix b/tvix/nix-compat-derive-tests/default.nix
new file mode 100644
index 000000000000..cabe9ad13780
--- /dev/null
+++ b/tvix/nix-compat-derive-tests/default.nix
@@ -0,0 +1,5 @@
+{ depot, ... }:
+
+depot.tvix.crates.workspaceMembers.nix-compat-derive-tests.build.override {
+  runTests = true;
+}
diff --git a/tvix/nix-compat-derive-tests/tests/read_derive.rs b/tvix/nix-compat-derive-tests/tests/read_derive.rs
new file mode 100644
index 000000000000..055d70cf046e
--- /dev/null
+++ b/tvix/nix-compat-derive-tests/tests/read_derive.rs
@@ -0,0 +1,417 @@
+use std::str::FromStr;
+
+use nix_compat::nix_daemon::de::mock::{Builder, Error};
+use nix_compat::nix_daemon::de::NixRead;
+use nix_compat_derive::NixDeserialize;
+
+#[derive(Debug, PartialEq, Eq, NixDeserialize)]
+pub struct UnitTest;
+
+#[derive(Debug, PartialEq, Eq, NixDeserialize)]
+pub struct EmptyTupleTest();
+
+#[derive(Debug, PartialEq, Eq, NixDeserialize)]
+pub struct StructTest {
+    first: u64,
+    second: String,
+}
+
+#[derive(Debug, PartialEq, Eq, NixDeserialize)]
+pub struct TupleTest(u64, String);
+
+#[derive(Debug, PartialEq, Eq, NixDeserialize)]
+pub struct StructVersionTest {
+    test: u64,
+    #[nix(version = "20..")]
+    hello: String,
+}
+
+fn default_test() -> StructVersionTest {
+    StructVersionTest {
+        test: 89,
+        hello: String::from("klomp"),
+    }
+}
+
+#[derive(Debug, PartialEq, Eq, NixDeserialize)]
+pub struct TupleVersionTest(u64, #[nix(version = "25..")] String);
+
+#[derive(Debug, PartialEq, Eq, NixDeserialize)]
+pub struct TupleVersionDefaultTest(
+    u64,
+    #[nix(version = "..25", default = "default_test")] StructVersionTest,
+);
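+// Note (as exercised by the tests below): fields gated with #[nix(version = "...")]
+// are only read from the wire when the peer protocol version is in range;
+// otherwise they fall back to Default::default(), or to the function named
+// in `default = "..."` if one is given.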
+
+#[tokio::test]
+async fn read_unit() {
+    let mut mock = Builder::new().build();
+    let v: UnitTest = mock.read_value().await.unwrap();
+    assert_eq!(UnitTest, v);
+}
+
+#[tokio::test]
+async fn read_empty_tuple() {
+    let mut mock = Builder::new().build();
+    let v: EmptyTupleTest = mock.read_value().await.unwrap();
+    assert_eq!(EmptyTupleTest(), v);
+}
+
+#[tokio::test]
+async fn read_struct() {
+    let mut mock = Builder::new().read_number(89).read_slice(b"klomp").build();
+    let v: StructTest = mock.read_value().await.unwrap();
+    assert_eq!(
+        StructTest {
+            first: 89,
+            second: String::from("klomp"),
+        },
+        v
+    );
+}
+
+#[tokio::test]
+async fn read_tuple() {
+    let mut mock = Builder::new().read_number(89).read_slice(b"klomp").build();
+    let v: TupleTest = mock.read_value().await.unwrap();
+    assert_eq!(TupleTest(89, String::from("klomp")), v);
+}
+
+#[tokio::test]
+async fn read_struct_version() {
+    let mut mock = Builder::new()
+        .version((1, 20))
+        .read_number(89)
+        .read_slice(b"klomp")
+        .build();
+    let v: StructVersionTest = mock.read_value().await.unwrap();
+    assert_eq!(default_test(), v);
+}
+
+#[tokio::test]
+async fn read_struct_without_version() {
+    let mut mock = Builder::new().version((1, 19)).read_number(89).build();
+    let v: StructVersionTest = mock.read_value().await.unwrap();
+    assert_eq!(
+        StructVersionTest {
+            test: 89,
+            hello: String::new(),
+        },
+        v
+    );
+}
+
+#[tokio::test]
+async fn read_tuple_version() {
+    let mut mock = Builder::new()
+        .version((1, 26))
+        .read_number(89)
+        .read_slice(b"klomp")
+        .build();
+    let v: TupleVersionTest = mock.read_value().await.unwrap();
+    assert_eq!(TupleVersionTest(89, "klomp".into()), v);
+}
+
+#[tokio::test]
+async fn read_tuple_without_version() {
+    let mut mock = Builder::new().version((1, 19)).read_number(89).build();
+    let v: TupleVersionTest = mock.read_value().await.unwrap();
+    assert_eq!(TupleVersionTest(89, String::new()), v);
+}
+
+#[tokio::test]
+async fn read_complex_1() {
+    let mut mock = Builder::new()
+        .version((1, 19))
+        .read_number(999)
+        .read_number(666)
+        .build();
+    let v: TupleVersionDefaultTest = mock.read_value().await.unwrap();
+    assert_eq!(
+        TupleVersionDefaultTest(
+            999,
+            StructVersionTest {
+                test: 666,
+                hello: String::new()
+            }
+        ),
+        v
+    );
+}
+
+#[tokio::test]
+async fn read_complex_2() {
+    let mut mock = Builder::new()
+        .version((1, 20))
+        .read_number(999)
+        .read_number(666)
+        .read_slice(b"The quick brown \xF0\x9F\xA6\x8A jumps over 13 lazy \xF0\x9F\x90\xB6.")
+        .build();
+    let v: TupleVersionDefaultTest = mock.read_value().await.unwrap();
+    assert_eq!(
+        TupleVersionDefaultTest(
+            999,
+            StructVersionTest {
+                test: 666,
+                hello: String::from("The quick brown ๐ŸฆŠ jumps over 13 lazy ๐Ÿถ.")
+            }
+        ),
+        v
+    );
+}
+
+#[tokio::test]
+async fn read_complex_3() {
+    let mut mock = Builder::new().version((1, 25)).read_number(999).build();
+    let v: TupleVersionDefaultTest = mock.read_value().await.unwrap();
+    assert_eq!(
+        TupleVersionDefaultTest(
+            999,
+            StructVersionTest {
+                test: 89,
+                hello: String::from("klomp")
+            }
+        ),
+        v
+    );
+}
+
+#[tokio::test]
+async fn read_complex_4() {
+    let mut mock = Builder::new().version((1, 26)).read_number(999).build();
+    let v: TupleVersionDefaultTest = mock.read_value().await.unwrap();
+    assert_eq!(
+        TupleVersionDefaultTest(
+            999,
+            StructVersionTest {
+                test: 89,
+                hello: String::from("klomp")
+            }
+        ),
+        v
+    );
+}
+
+#[tokio::test]
+async fn read_field_invalid_data() {
+    let mut mock = Builder::new()
+        .read_number(666)
+        .read_slice(b"The quick brown \xED\xA0\x80 jumped.")
+        .build();
+    let err = mock.read_value::<StructTest>().await.unwrap_err();
+    assert_eq!(
+        Error::InvalidData("invalid utf-8 sequence of 1 bytes from index 16".into()),
+        err
+    );
+}
+
+#[tokio::test]
+async fn read_field_missing_data() {
+    let mut mock = Builder::new().read_number(666).build();
+    let err = mock.read_value::<StructTest>().await.unwrap_err();
+    assert_eq!(Error::MissingData("unexpected end-of-file".into()), err);
+}
+
+#[tokio::test]
+async fn read_field_no_data() {
+    let mut mock = Builder::new().build();
+    let err = mock.read_value::<StructTest>().await.unwrap_err();
+    assert_eq!(Error::MissingData("unexpected end-of-file".into()), err);
+}
+
+#[tokio::test]
+async fn read_field_reader_error_first() {
+    let mut mock = Builder::new()
+        .read_number_error(Error::InvalidData("Bad reader".into()))
+        .build();
+    let err = mock.read_value::<StructTest>().await.unwrap_err();
+    assert_eq!(Error::InvalidData("Bad reader".into()), err);
+}
+
+#[tokio::test]
+async fn read_field_reader_error_later() {
+    let mut mock = Builder::new()
+        .read_number(999)
+        .read_bytes_error(Error::InvalidData("Bad reader".into()))
+        .build();
+    let err = mock.read_value::<StructTest>().await.unwrap_err();
+    assert_eq!(Error::InvalidData("Bad reader".into()), err);
+}
+
+#[derive(Debug, PartialEq, Eq, NixDeserialize)]
+#[nix(from_str)]
+struct TestFromStr;
+
+impl FromStr for TestFromStr {
+    type Err = String;
+
+    fn from_str(s: &str) -> Result<Self, Self::Err> {
+        if s == "test" {
+            Ok(TestFromStr)
+        } else {
+            Err(s.into())
+        }
+    }
+}
+
+#[tokio::test]
+async fn read_from_str() {
+    let mut mock = Builder::new().read_slice(b"test").build();
+    let value = mock.read_value::<TestFromStr>().await.unwrap();
+    assert_eq!(TestFromStr, value);
+}
+
+#[tokio::test]
+async fn read_from_str_invalid_data() {
+    let mut mock = Builder::new().read_slice(b"wrong string").build();
+    let err = mock.read_value::<TestFromStr>().await.unwrap_err();
+    assert_eq!(Error::InvalidData("wrong string".into()), err);
+}
+
+#[tokio::test]
+async fn read_from_str_invalid_string() {
+    let mut mock = Builder::new()
+        .read_slice(b"The quick brown \xED\xA0\x80 jumped.")
+        .build();
+    let err = mock.read_value::<TestFromStr>().await.unwrap_err();
+    assert_eq!(
+        Error::InvalidData("invalid utf-8 sequence of 1 bytes from index 16".into()),
+        err
+    );
+}
+
+#[tokio::test]
+async fn read_from_str_reader_error() {
+    let mut mock = Builder::new()
+        .read_bytes_error(Error::InvalidData("Bad reader".into()))
+        .build();
+    let err = mock.read_value::<TestFromStr>().await.unwrap_err();
+    assert_eq!(Error::InvalidData("Bad reader".into()), err);
+}
+
+#[derive(Debug, PartialEq, Eq, NixDeserialize)]
+#[nix(try_from = "u64")]
+struct TestTryFromU64;
+
+impl TryFrom<u64> for TestTryFromU64 {
+    type Error = u64;
+
+    fn try_from(value: u64) -> Result<TestTryFromU64, Self::Error> {
+        if value == 42 {
+            Ok(TestTryFromU64)
+        } else {
+            Err(value)
+        }
+    }
+}
+
+#[tokio::test]
+async fn read_try_from_u64() {
+    let mut mock = Builder::new().read_number(42).build();
+    let value = mock.read_value::<TestTryFromU64>().await.unwrap();
+    assert_eq!(TestTryFromU64, value);
+}
+
+#[tokio::test]
+async fn read_try_from_u64_invalid_data() {
+    let mut mock = Builder::new().read_number(666).build();
+    let err = mock.read_value::<TestTryFromU64>().await.unwrap_err();
+    assert_eq!(Error::InvalidData("666".into()), err);
+}
+
+#[tokio::test]
+async fn read_try_from_u64_reader_error() {
+    let mut mock = Builder::new()
+        .read_number_error(Error::InvalidData("Bad reader".into()))
+        .build();
+    let err = mock.read_value::<TestTryFromU64>().await.unwrap_err();
+    assert_eq!(Error::InvalidData("Bad reader".into()), err);
+}
+
+#[derive(Debug, PartialEq, Eq, NixDeserialize)]
+#[nix(from = "u64")]
+struct TestFromU64;
+
+impl From<u64> for TestFromU64 {
+    fn from(_value: u64) -> TestFromU64 {
+        TestFromU64
+    }
+}
+
+#[tokio::test]
+async fn read_from_u64() {
+    let mut mock = Builder::new().read_number(42).build();
+    let value = mock.read_value::<TestFromU64>().await.unwrap();
+    assert_eq!(TestFromU64, value);
+}
+
+#[tokio::test]
+async fn read_from_u64_reader_error() {
+    let mut mock = Builder::new()
+        .read_number_error(Error::InvalidData("Bad reader".into()))
+        .build();
+    let err = mock.read_value::<TestFromU64>().await.unwrap_err();
+    assert_eq!(Error::InvalidData("Bad reader".into()), err);
+}
+
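+// A sketch of version-gated variants: the derive is expected to dispatch on
+// the reader's protocol minor version, so 19 selects Pre20 and 20 or later
+// selects Post20, as the tests below exercise.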
+#[derive(Debug, PartialEq, Eq, NixDeserialize)]
+enum TestEnum {
+    #[nix(version = "..=19")]
+    Pre20(TestTryFromU64),
+    #[nix(version = "20..")]
+    Post20(StructVersionTest),
+}
+
+#[tokio::test]
+async fn read_enum_19() {
+    let mut mock = Builder::new().version((1, 19)).read_number(42).build();
+    let value = mock.read_value::<TestEnum>().await.unwrap();
+    assert_eq!(TestEnum::Pre20(TestTryFromU64), value);
+}
+
+#[tokio::test]
+async fn read_enum_20() {
+    let mut mock = Builder::new()
+        .version((1, 20))
+        .read_number(42)
+        .read_slice(b"klomp")
+        .build();
+    let value = mock.read_value::<TestEnum>().await.unwrap();
+    assert_eq!(
+        TestEnum::Post20(StructVersionTest {
+            test: 42,
+            hello: "klomp".into(),
+        }),
+        value
+    );
+}
+
+#[tokio::test]
+async fn read_enum_reader_error() {
+    let mut mock = Builder::new()
+        .version((1, 19))
+        .read_number_error(Error::InvalidData("Bad reader".into()))
+        .build();
+    let err = mock.read_value::<TestEnum>().await.unwrap_err();
+    assert_eq!(Error::InvalidData("Bad reader".into()), err);
+}
+
+#[tokio::test]
+async fn read_enum_invalid_data_19() {
+    let mut mock = Builder::new().version((1, 19)).read_number(666).build();
+    let err = mock.read_value::<TestEnum>().await.unwrap_err();
+    assert_eq!(Error::InvalidData("666".into()), err);
+}
+
+#[tokio::test]
+async fn read_enum_invalid_data_20() {
+    let mut mock = Builder::new()
+        .version((1, 20))
+        .read_number(666)
+        .read_slice(b"The quick brown \xED\xA0\x80 jumped.")
+        .build();
+    let err = mock.read_value::<TestEnum>().await.unwrap_err();
+    assert_eq!(
+        Error::InvalidData("invalid utf-8 sequence of 1 bytes from index 16".into()),
+        err
+    );
+}
diff --git a/tvix/nix-compat-derive-tests/tests/ui.rs b/tvix/nix-compat-derive-tests/tests/ui.rs
new file mode 100644
index 000000000000..6a7bffeaf832
--- /dev/null
+++ b/tvix/nix-compat-derive-tests/tests/ui.rs
@@ -0,0 +1,6 @@
+#[cfg(feature = "compile-tests")]
+#[test]
+fn ui() {
+    let t = trybuild::TestCases::new();
+    t.compile_fail("tests/ui/*.rs");
+}
diff --git a/tvix/nix-compat-derive-tests/tests/ui/deserialize_bad_type.rs b/tvix/nix-compat-derive-tests/tests/ui/deserialize_bad_type.rs
new file mode 100644
index 000000000000..f77469679999
--- /dev/null
+++ b/tvix/nix-compat-derive-tests/tests/ui/deserialize_bad_type.rs
@@ -0,0 +1,10 @@
+use nix_compat_derive::NixDeserialize;
+
+pub struct BadType;
+
+#[derive(NixDeserialize)]
+pub struct Test {
+    version: BadType,
+}
+
+fn main() {}
diff --git a/tvix/nix-compat-derive-tests/tests/ui/deserialize_bad_type.stderr b/tvix/nix-compat-derive-tests/tests/ui/deserialize_bad_type.stderr
new file mode 100644
index 000000000000..12ffdc83c726
--- /dev/null
+++ b/tvix/nix-compat-derive-tests/tests/ui/deserialize_bad_type.stderr
@@ -0,0 +1,21 @@
+error[E0277]: the trait bound `BadType: NixDeserialize` is not satisfied
+ --> tests/ui/deserialize_bad_type.rs:7:14
+  |
+7 |     version: BadType,
+  |              ^^^^^^^ the trait `NixDeserialize` is not implemented for `BadType`
+  |
+  = help: the following other types implement trait `NixDeserialize`:
+            BTreeMap<K, V>
+            String
+            Test
+            Vec<T>
+            bool
+            bytes::bytes::Bytes
+            i64
+            u64
+            usize
+note: required by a bound in `try_read_value`
+ --> $WORKSPACE/nix-compat/src/nix_daemon/de/mod.rs
+  |
+  |     fn try_read_value<V: NixDeserialize>(
+  |                          ^^^^^^^^^^^^^^ required by this bound in `NixRead::try_read_value`
diff --git a/tvix/nix-compat-derive-tests/tests/ui/deserialize_enum_non_exaustive.rs b/tvix/nix-compat-derive-tests/tests/ui/deserialize_enum_non_exaustive.rs
new file mode 100644
index 000000000000..ab559f2b81c8
--- /dev/null
+++ b/tvix/nix-compat-derive-tests/tests/ui/deserialize_enum_non_exaustive.rs
@@ -0,0 +1,13 @@
+use nix_compat_derive::NixDeserialize;
+
+#[derive(NixDeserialize)]
+pub enum Test {
+    #[nix(version = "..=10")]
+    Old,
+    #[nix(version = "15..=17")]
+    Legacy,
+    #[nix(version = "50..")]
+    NewWay,
+}
+
+fn main() {}
diff --git a/tvix/nix-compat-derive-tests/tests/ui/deserialize_enum_non_exaustive.stderr b/tvix/nix-compat-derive-tests/tests/ui/deserialize_enum_non_exaustive.stderr
new file mode 100644
index 000000000000..8a46d9439e35
--- /dev/null
+++ b/tvix/nix-compat-derive-tests/tests/ui/deserialize_enum_non_exaustive.stderr
@@ -0,0 +1,8 @@
+error[E0004]: non-exhaustive patterns: `11_u8..=14_u8` and `18_u8..=49_u8` not covered
+ --> tests/ui/deserialize_enum_non_exaustive.rs:3:10
+  |
+3 | #[derive(NixDeserialize)]
+  |          ^^^^^^^^^^^^^^ patterns `11_u8..=14_u8` and `18_u8..=49_u8` not covered
+  |
+  = note: the matched value is of type `u8`
+  = note: this error originates in the derive macro `NixDeserialize` (in Nightly builds, run with -Z macro-backtrace for more info)
diff --git a/tvix/nix-compat-derive-tests/tests/ui/deserialize_from_missing.rs b/tvix/nix-compat-derive-tests/tests/ui/deserialize_from_missing.rs
new file mode 100644
index 000000000000..913b7c4f7e59
--- /dev/null
+++ b/tvix/nix-compat-derive-tests/tests/ui/deserialize_from_missing.rs
@@ -0,0 +1,7 @@
+use nix_compat_derive::NixDeserialize;
+
+#[derive(NixDeserialize)]
+#[nix(from = "u64")]
+pub struct Test;
+
+fn main() {}
diff --git a/tvix/nix-compat-derive-tests/tests/ui/deserialize_from_missing.stderr b/tvix/nix-compat-derive-tests/tests/ui/deserialize_from_missing.stderr
new file mode 100644
index 000000000000..0124010cf10c
--- /dev/null
+++ b/tvix/nix-compat-derive-tests/tests/ui/deserialize_from_missing.stderr
@@ -0,0 +1,5 @@
+error[E0277]: the trait bound `Test: From<u64>` is not satisfied
+ --> tests/ui/deserialize_from_missing.rs:4:14
+  |
+4 | #[nix(from = "u64")]
+  |              ^^^^^ the trait `From<u64>` is not implemented for `Test`
diff --git a/tvix/nix-compat-derive-tests/tests/ui/deserialize_from_str_error_not_display.rs b/tvix/nix-compat-derive-tests/tests/ui/deserialize_from_str_error_not_display.rs
new file mode 100644
index 000000000000..36cd4b153740
--- /dev/null
+++ b/tvix/nix-compat-derive-tests/tests/ui/deserialize_from_str_error_not_display.rs
@@ -0,0 +1,20 @@
+use std::str::FromStr;
+
+use nix_compat_derive::NixDeserialize;
+
+#[derive(NixDeserialize)]
+#[nix(from_str)]
+pub struct Test;
+
+impl FromStr for Test {
+    type Err = ();
+    fn from_str(s: &str) -> Result<Self, Self::Err> {
+        if s == "test" {
+            Ok(Test)
+        } else {
+            Err(())
+        }
+    }
+}
+
+fn main() {}
diff --git a/tvix/nix-compat-derive-tests/tests/ui/deserialize_from_str_error_not_display.stderr b/tvix/nix-compat-derive-tests/tests/ui/deserialize_from_str_error_not_display.stderr
new file mode 100644
index 000000000000..8283ed5340f3
--- /dev/null
+++ b/tvix/nix-compat-derive-tests/tests/ui/deserialize_from_str_error_not_display.stderr
@@ -0,0 +1,13 @@
+error[E0277]: `()` doesn't implement `std::fmt::Display`
+ --> tests/ui/deserialize_from_str_error_not_display.rs:6:7
+  |
+6 | #[nix(from_str)]
+  |       ^^^^^^^^ `()` cannot be formatted with the default formatter
+  |
+  = help: the trait `std::fmt::Display` is not implemented for `()`
+  = note: in format strings you may be able to use `{:?}` (or {:#?} for pretty-print) instead
+note: required by a bound in `invalid_data`
+ --> $WORKSPACE/nix-compat/src/nix_daemon/de/mod.rs
+  |
+  |     fn invalid_data<T: fmt::Display>(msg: T) -> Self {
+  |                        ^^^^^^^^^^^^ required by this bound in `Error::invalid_data`
diff --git a/tvix/nix-compat-derive-tests/tests/ui/deserialize_from_str_missing.rs b/tvix/nix-compat-derive-tests/tests/ui/deserialize_from_str_missing.rs
new file mode 100644
index 000000000000..a959db57e640
--- /dev/null
+++ b/tvix/nix-compat-derive-tests/tests/ui/deserialize_from_str_missing.rs
@@ -0,0 +1,7 @@
+use nix_compat_derive::NixDeserialize;
+
+#[derive(NixDeserialize)]
+#[nix(from_str)]
+pub struct Test;
+
+fn main() {}
diff --git a/tvix/nix-compat-derive-tests/tests/ui/deserialize_from_str_missing.stderr b/tvix/nix-compat-derive-tests/tests/ui/deserialize_from_str_missing.stderr
new file mode 100644
index 000000000000..f68f588011fc
--- /dev/null
+++ b/tvix/nix-compat-derive-tests/tests/ui/deserialize_from_str_missing.stderr
@@ -0,0 +1,16 @@
+error[E0277]: the trait bound `Test: FromStr` is not satisfied
+ --> tests/ui/deserialize_from_str_missing.rs:4:7
+  |
+4 | #[nix(from_str)]
+  |       ^^^^^^^^ the trait `FromStr` is not implemented for `Test`
+  |
+  = help: the following other types implement trait `FromStr`:
+            IpAddr
+            Ipv4Addr
+            Ipv6Addr
+            NonZero<i128>
+            NonZero<i16>
+            NonZero<i32>
+            NonZero<i64>
+            NonZero<i8>
+          and $N others
diff --git a/tvix/nix-compat-derive-tests/tests/ui/deserialize_missing_default.rs b/tvix/nix-compat-derive-tests/tests/ui/deserialize_missing_default.rs
new file mode 100644
index 000000000000..e9df62845518
--- /dev/null
+++ b/tvix/nix-compat-derive-tests/tests/ui/deserialize_missing_default.rs
@@ -0,0 +1,12 @@
+use nix_compat_derive::NixDeserialize;
+
+#[derive(NixDeserialize)]
+pub struct Value(String);
+
+#[derive(NixDeserialize)]
+pub struct Test {
+    #[nix(version = "20..")]
+    version: Value,
+}
+
+fn main() {}
diff --git a/tvix/nix-compat-derive-tests/tests/ui/deserialize_missing_default.stderr b/tvix/nix-compat-derive-tests/tests/ui/deserialize_missing_default.stderr
new file mode 100644
index 000000000000..5cc2f5974e4c
--- /dev/null
+++ b/tvix/nix-compat-derive-tests/tests/ui/deserialize_missing_default.stderr
@@ -0,0 +1,12 @@
+error[E0277]: the trait bound `Value: Default` is not satisfied
+ --> tests/ui/deserialize_missing_default.rs:6:10
+  |
+6 | #[derive(NixDeserialize)]
+  |          ^^^^^^^^^^^^^^ the trait `Default` is not implemented for `Value`
+  |
+  = note: this error originates in the derive macro `NixDeserialize` (in Nightly builds, run with -Z macro-backtrace for more info)
+help: consider annotating `Value` with `#[derive(Default)]`
+  |
+4 + #[derive(Default)]
+5 | pub struct Value(String);
+  |
diff --git a/tvix/nix-compat-derive-tests/tests/ui/deserialize_missing_default_path.rs b/tvix/nix-compat-derive-tests/tests/ui/deserialize_missing_default_path.rs
new file mode 100644
index 000000000000..4f319c069dca
--- /dev/null
+++ b/tvix/nix-compat-derive-tests/tests/ui/deserialize_missing_default_path.rs
@@ -0,0 +1,12 @@
+use nix_compat_derive::NixDeserialize;
+
+#[derive(NixDeserialize)]
+pub struct Value(String);
+
+#[derive(NixDeserialize)]
+pub struct Test {
+    #[nix(version = "20..", default = "Value::make_default")]
+    version: Value,
+}
+
+fn main() {}
diff --git a/tvix/nix-compat-derive-tests/tests/ui/deserialize_missing_default_path.stderr b/tvix/nix-compat-derive-tests/tests/ui/deserialize_missing_default_path.stderr
new file mode 100644
index 000000000000..bb9af749128d
--- /dev/null
+++ b/tvix/nix-compat-derive-tests/tests/ui/deserialize_missing_default_path.stderr
@@ -0,0 +1,8 @@
+error[E0599]: no function or associated item named `make_default` found for struct `Value` in the current scope
+ --> tests/ui/deserialize_missing_default_path.rs:8:39
+  |
+4 | pub struct Value(String);
+  | ---------------- function or associated item `make_default` not found for this struct
+...
+8 |     #[nix(version = "20..", default = "Value::make_default")]
+  |                                       ^^^^^^^^^^^^^^^^^^^^^ function or associated item not found in `Value`
diff --git a/tvix/nix-compat-derive-tests/tests/ui/deserialize_remote_missing_attr.rs b/tvix/nix-compat-derive-tests/tests/ui/deserialize_remote_missing_attr.rs
new file mode 100644
index 000000000000..cc2ab5bfbc11
--- /dev/null
+++ b/tvix/nix-compat-derive-tests/tests/ui/deserialize_remote_missing_attr.rs
@@ -0,0 +1,15 @@
+use nix_compat_derive::nix_deserialize_remote;
+
+pub struct Value(String);
+impl From<String> for Value {
+    fn from(s: String) -> Value {
+        Value(s)
+    }
+}
+
+nix_deserialize_remote!(
+    #[nix()]
+    Value
+);
+
+fn main() {}
diff --git a/tvix/nix-compat-derive-tests/tests/ui/deserialize_remote_missing_attr.stderr b/tvix/nix-compat-derive-tests/tests/ui/deserialize_remote_missing_attr.stderr
new file mode 100644
index 000000000000..a1c18adc6e48
--- /dev/null
+++ b/tvix/nix-compat-derive-tests/tests/ui/deserialize_remote_missing_attr.stderr
@@ -0,0 +1,5 @@
+error: Missing from_str, from or try_from attribute
+  --> tests/ui/deserialize_remote_missing_attr.rs:10:25
+   |
+10 | nix_deserialize_remote!(#[nix()] Value);
+   |                         ^^^^^^^^^^^^^^
diff --git a/tvix/nix-compat-derive-tests/tests/ui/deserialize_try_from_error_not_display.rs b/tvix/nix-compat-derive-tests/tests/ui/deserialize_try_from_error_not_display.rs
new file mode 100644
index 000000000000..7f8ad6bbfc4e
--- /dev/null
+++ b/tvix/nix-compat-derive-tests/tests/ui/deserialize_try_from_error_not_display.rs
@@ -0,0 +1,19 @@
+use nix_compat_derive::NixDeserialize;
+
+#[derive(NixDeserialize)]
+#[nix(try_from = "u64")]
+pub struct Test;
+
+impl TryFrom<u64> for Test {
+    type Error = ();
+
+    fn try_from(value: u64) -> Result<Test, Self::Error> {
+        if value == 42 {
+            Ok(Test)
+        } else {
+            Err(())
+        }
+    }
+}
+
+fn main() {}
diff --git a/tvix/nix-compat-derive-tests/tests/ui/deserialize_try_from_error_not_display.stderr b/tvix/nix-compat-derive-tests/tests/ui/deserialize_try_from_error_not_display.stderr
new file mode 100644
index 000000000000..8e55a3c56189
--- /dev/null
+++ b/tvix/nix-compat-derive-tests/tests/ui/deserialize_try_from_error_not_display.stderr
@@ -0,0 +1,13 @@
+error[E0277]: `()` doesn't implement `std::fmt::Display`
+ --> tests/ui/deserialize_try_from_error_not_display.rs:4:18
+  |
+4 | #[nix(try_from = "u64")]
+  |                  ^^^^^ `()` cannot be formatted with the default formatter
+  |
+  = help: the trait `std::fmt::Display` is not implemented for `()`
+  = note: in format strings you may be able to use `{:?}` (or {:#?} for pretty-print) instead
+note: required by a bound in `invalid_data`
+ --> $WORKSPACE/nix-compat/src/nix_daemon/de/mod.rs
+  |
+  |     fn invalid_data<T: fmt::Display>(msg: T) -> Self {
+  |                        ^^^^^^^^^^^^ required by this bound in `Error::invalid_data`
diff --git a/tvix/nix-compat-derive-tests/tests/ui/deserialize_try_from_missing.rs b/tvix/nix-compat-derive-tests/tests/ui/deserialize_try_from_missing.rs
new file mode 100644
index 000000000000..899095ae3542
--- /dev/null
+++ b/tvix/nix-compat-derive-tests/tests/ui/deserialize_try_from_missing.rs
@@ -0,0 +1,7 @@
+use nix_compat_derive::NixDeserialize;
+
+#[derive(NixDeserialize)]
+#[nix(try_from = "u64")]
+pub struct Test;
+
+fn main() {}
diff --git a/tvix/nix-compat-derive-tests/tests/ui/deserialize_try_from_missing.stderr b/tvix/nix-compat-derive-tests/tests/ui/deserialize_try_from_missing.stderr
new file mode 100644
index 000000000000..9605d1f3378f
--- /dev/null
+++ b/tvix/nix-compat-derive-tests/tests/ui/deserialize_try_from_missing.stderr
@@ -0,0 +1,8 @@
+error[E0277]: the trait bound `Test: From<u64>` is not satisfied
+ --> tests/ui/deserialize_try_from_missing.rs:4:18
+  |
+4 | #[nix(try_from = "u64")]
+  |                  ^^^^^ the trait `From<u64>` is not implemented for `Test`, which is required by `Test: TryFrom<u64>`
+  |
+  = note: required for `u64` to implement `Into<Test>`
+  = note: required for `Test` to implement `TryFrom<u64>`
diff --git a/tvix/nix-compat-derive-tests/tests/ui/parse_bad_default.rs b/tvix/nix-compat-derive-tests/tests/ui/parse_bad_default.rs
new file mode 100644
index 000000000000..d87831cecf51
--- /dev/null
+++ b/tvix/nix-compat-derive-tests/tests/ui/parse_bad_default.rs
@@ -0,0 +1,9 @@
+use nix_compat_derive::NixDeserialize;
+
+#[derive(NixDeserialize)]
+pub struct Test {
+    #[nix(default = 12)]
+    version: u8,
+}
+
+fn main() {}
diff --git a/tvix/nix-compat-derive-tests/tests/ui/parse_bad_default.stderr b/tvix/nix-compat-derive-tests/tests/ui/parse_bad_default.stderr
new file mode 100644
index 000000000000..acb1bc2a47bc
--- /dev/null
+++ b/tvix/nix-compat-derive-tests/tests/ui/parse_bad_default.stderr
@@ -0,0 +1,5 @@
+error: expected nix attribute default to be string
+ --> tests/ui/parse_bad_default.rs:5:21
+  |
+5 |     #[nix(default = 12)]
+  |                     ^^
diff --git a/tvix/nix-compat-derive-tests/tests/ui/parse_bad_default_path.rs b/tvix/nix-compat-derive-tests/tests/ui/parse_bad_default_path.rs
new file mode 100644
index 000000000000..fbde8ffbc2b0
--- /dev/null
+++ b/tvix/nix-compat-derive-tests/tests/ui/parse_bad_default_path.rs
@@ -0,0 +1,9 @@
+use nix_compat_derive::NixDeserialize;
+
+#[derive(NixDeserialize)]
+pub struct Test {
+    #[nix(default = "12")]
+    version: u8,
+}
+
+fn main() {}
diff --git a/tvix/nix-compat-derive-tests/tests/ui/parse_bad_default_path.stderr b/tvix/nix-compat-derive-tests/tests/ui/parse_bad_default_path.stderr
new file mode 100644
index 000000000000..7628d4c83bea
--- /dev/null
+++ b/tvix/nix-compat-derive-tests/tests/ui/parse_bad_default_path.stderr
@@ -0,0 +1,5 @@
+error: expected identifier
+ --> tests/ui/parse_bad_default_path.rs:5:21
+  |
+5 |     #[nix(default = "12")]
+  |                     ^^^^
diff --git a/tvix/nix-compat-derive-tests/tests/ui/parse_bad_nix.rs b/tvix/nix-compat-derive-tests/tests/ui/parse_bad_nix.rs
new file mode 100644
index 000000000000..690e76a20fe6
--- /dev/null
+++ b/tvix/nix-compat-derive-tests/tests/ui/parse_bad_nix.rs
@@ -0,0 +1,9 @@
+use nix_compat_derive::NixDeserialize;
+
+#[derive(NixDeserialize)]
+pub struct Test {
+    #[nix]
+    version: u8,
+}
+
+fn main() {}
diff --git a/tvix/nix-compat-derive-tests/tests/ui/parse_bad_nix.stderr b/tvix/nix-compat-derive-tests/tests/ui/parse_bad_nix.stderr
new file mode 100644
index 000000000000..da3d2d9aab47
--- /dev/null
+++ b/tvix/nix-compat-derive-tests/tests/ui/parse_bad_nix.stderr
@@ -0,0 +1,5 @@
+error: expected attribute arguments in parentheses: #[nix(...)]
+ --> tests/ui/parse_bad_nix.rs:5:7
+  |
+5 |     #[nix]
+  |       ^^^
diff --git a/tvix/nix-compat-derive-tests/tests/ui/parse_bad_version.rs b/tvix/nix-compat-derive-tests/tests/ui/parse_bad_version.rs
new file mode 100644
index 000000000000..35b3b05c23e1
--- /dev/null
+++ b/tvix/nix-compat-derive-tests/tests/ui/parse_bad_version.rs
@@ -0,0 +1,9 @@
+use nix_compat_derive::NixDeserialize;
+
+#[derive(NixDeserialize)]
+pub struct Test {
+    #[nix(version = 12)]
+    version: u8,
+}
+
+fn main() {}
diff --git a/tvix/nix-compat-derive-tests/tests/ui/parse_bad_version.stderr b/tvix/nix-compat-derive-tests/tests/ui/parse_bad_version.stderr
new file mode 100644
index 000000000000..48cc817fac9d
--- /dev/null
+++ b/tvix/nix-compat-derive-tests/tests/ui/parse_bad_version.stderr
@@ -0,0 +1,5 @@
+error: expected nix attribute version to be string
+ --> tests/ui/parse_bad_version.rs:5:21
+  |
+5 |     #[nix(version = 12)]
+  |                     ^^
diff --git a/tvix/nix-compat-derive-tests/tests/ui/parse_mising_version.rs b/tvix/nix-compat-derive-tests/tests/ui/parse_mising_version.rs
new file mode 100644
index 000000000000..9eaa743ed2b6
--- /dev/null
+++ b/tvix/nix-compat-derive-tests/tests/ui/parse_mising_version.rs
@@ -0,0 +1,9 @@
+use nix_compat_derive::NixDeserialize;
+
+#[derive(NixDeserialize)]
+pub struct Test {
+    #[nix(version)]
+    version: u8,
+}
+
+fn main() {}
diff --git a/tvix/nix-compat-derive-tests/tests/ui/parse_mising_version.stderr b/tvix/nix-compat-derive-tests/tests/ui/parse_mising_version.stderr
new file mode 100644
index 000000000000..79f048e11198
--- /dev/null
+++ b/tvix/nix-compat-derive-tests/tests/ui/parse_mising_version.stderr
@@ -0,0 +1,5 @@
+error: expected `=`
+ --> tests/ui/parse_mising_version.rs:5:18
+  |
+5 |     #[nix(version)]
+  |                  ^
diff --git a/tvix/nix-compat-derive/Cargo.toml b/tvix/nix-compat-derive/Cargo.toml
new file mode 100644
index 000000000000..da6d6744e650
--- /dev/null
+++ b/tvix/nix-compat-derive/Cargo.toml
@@ -0,0 +1,24 @@
+[package]
+name = "nix-compat-derive"
+version = "0.1.0"
+edition = "2021"
+
+[lib]
+proc-macro = true
+
+[dependencies]
+proc-macro2 = { workspace = true, features = ["proc-macro"] }
+quote = { workspace = true, features = ["proc-macro"] }
+syn = { version = "2.0.76", features = ["full", "extra-traits"] }
+
+[dev-dependencies]
+hex-literal = { workspace = true }
+pretty_assertions = { workspace = true }
+rstest = { workspace = true }
+tokio-test = { workspace = true }
+tokio = { workspace = true, features = ["io-util", "macros"] }
+
+[dev-dependencies.nix-compat]
+path = "../nix-compat"
+default-features = false
+features = ["async", "wire", "test"]
diff --git a/tvix/nix-compat-derive/default.nix b/tvix/nix-compat-derive/default.nix
new file mode 100644
index 000000000000..e6636e7f2510
--- /dev/null
+++ b/tvix/nix-compat-derive/default.nix
@@ -0,0 +1,5 @@
+{ depot, lib, ... }:
+
+depot.tvix.crates.workspaceMembers.nix-compat-derive.build.override {
+  runTests = true;
+}
diff --git a/tvix/nix-compat-derive/src/de.rs b/tvix/nix-compat-derive/src/de.rs
new file mode 100644
index 000000000000..ee79ea9d1012
--- /dev/null
+++ b/tvix/nix-compat-derive/src/de.rs
@@ -0,0 +1,272 @@
+use proc_macro2::{Span, TokenStream};
+use quote::{quote, quote_spanned, ToTokens};
+use syn::spanned::Spanned;
+use syn::{DeriveInput, Generics, Path, Type};
+
+use crate::internal::attrs::Default;
+use crate::internal::inputs::RemoteInput;
+use crate::internal::{attrs, Container, Context, Data, Field, Remote, Style, Variant};
+
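+/// Entry point for `#[derive(NixDeserialize)]`: builds the internal model of
+/// the input via `Container::from_ast`, surfaces any attribute errors
+/// collected on the `Context`, and emits the trait impl.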
+pub fn expand_nix_deserialize(crate_path: Path, input: &mut DeriveInput) -> syn::Result<TokenStream> {
+    let cx = Context::new();
+    let cont = Container::from_ast(&cx, crate_path, input);
+    cx.check()?;
+    let cont = cont.unwrap();
+
+    let ty = cont.ident_type();
+    let body = nix_deserialize_body(&cont);
+    let crate_path = cont.crate_path();
+
+    Ok(nix_deserialize_impl(
+        crate_path,
+        &ty,
+        &cont.original.generics,
+        body,
+    ))
+}
+
+pub fn expand_nix_deserialize_remote(
+    crate_path: Path,
+    input: &RemoteInput,
+) -> syn::Result<TokenStream> {
+    let cx = Context::new();
+    let remote = Remote::from_ast(&cx, crate_path, input);
+    cx.check()?;
+    let remote = remote.unwrap();
+
+    let crate_path = remote.crate_path();
+    let body = nix_deserialize_body_from(crate_path, &remote.attrs)
+        .expect("Remote::from_ast checked that one of from_str, from or try_from is present");
+    let generics = Generics::default();
+    Ok(nix_deserialize_impl(crate_path, remote.ty, &generics, body))
+}
+
+fn nix_deserialize_impl(
+    crate_path: &Path,
+    ty: &Type,
+    generics: &Generics,
+    body: TokenStream,
+) -> TokenStream {
+    let (impl_generics, ty_generics, where_clause) = generics.split_for_impl();
+
+    quote! {
+        #[automatically_derived]
+        impl #impl_generics #crate_path::nix_daemon::de::NixDeserialize for #ty #ty_generics
+            #where_clause
+        {
+            #[allow(clippy::manual_async_fn)]
+            fn try_deserialize<R>(reader: &mut R) -> impl ::std::future::Future<Output=Result<Option<Self>, R::Error>> + Send + '_
+                where R: ?Sized + #crate_path::nix_daemon::de::NixRead + Send,
+            {
+                #body
+            }
+        }
+    }
+}
+
+fn nix_deserialize_body_from(
+    crate_path: &syn::Path,
+    attrs: &attrs::Container,
+) -> Option<TokenStream> {
+    if let Some(span) = attrs.from_str.as_ref() {
+        Some(nix_deserialize_from_str(crate_path, span.span()))
+    } else if let Some(type_from) = attrs.type_from.as_ref() {
+        Some(nix_deserialize_from(type_from))
+    } else {
+        attrs
+            .type_try_from
+            .as_ref()
+            .map(|type_try_from| nix_deserialize_try_from(crate_path, type_try_from))
+    }
+}
+
+fn nix_deserialize_body(cont: &Container) -> TokenStream {
+    if let Some(tokens) = nix_deserialize_body_from(cont.crate_path(), &cont.attrs) {
+        tokens
+    } else {
+        match &cont.data {
+            Data::Struct(style, fields) => nix_deserialize_struct(*style, fields),
+            Data::Enum(variants) => nix_deserialize_enum(variants),
+        }
+    }
+}
+
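+/// Generates the deserialize body for a struct. As a rough sketch (not the
+/// literal expansion), a hypothetical
+/// `struct Foo { a: u64, #[nix(version = "20..")] b: String }` expands to:
+///
+/// ```ignore
+/// async move {
+///     // First field: a `None` from try_read_value (EOF) propagates as Ok(None).
+///     let a: u64 = match reader.try_read_value().await? {
+///         Some(v) => v,
+///         None => return Ok(None),
+///     };
+///     // Versioned field: only read when the protocol minor version matches,
+///     // otherwise fall back to the default.
+///     let b: String = if (20..).contains(&reader.version().minor()) {
+///         reader.read_value().await?
+///     } else {
+///         Default::default()
+///     };
+///     Ok(Some(Self { a, b }))
+/// }
+/// ```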
+fn nix_deserialize_struct(style: Style, fields: &[Field<'_>]) -> TokenStream {
+    let read_fields = fields.iter().map(|f| {
+        let field = f.var_ident();
+        let ty = f.ty;
+        let read_value = quote_spanned! {
+            ty.span()=> if first__ {
+                first__ = false;
+                if let Some(v) = reader.try_read_value::<#ty>().await? {
+                    v
+                } else {
+                    return Ok(None);
+                }
+            } else {
+                reader.read_value::<#ty>().await?
+            }
+        };
+        if let Some(version) = f.attrs.version.as_ref() {
+            let default = match &f.attrs.default {
+                Default::Default => quote_spanned!(ty.span()=>::std::default::Default::default),
+                Default::Path(path) => path.to_token_stream(),
+                _ => panic!("No default for versioned field"),
+            };
+            quote! {
+                let #field : #ty = if (#version).contains(&reader.version().minor()) {
+                    #read_value
+                } else {
+                    #default()
+                };
+            }
+        } else {
+            quote! {
+                let #field : #ty = #read_value;
+            }
+        }
+    });
+
+    let field_names = fields.iter().map(|f| f.var_ident());
+    let construct = match style {
+        Style::Struct => {
+            quote! {
+                Self { #(#field_names),* }
+            }
+        }
+        Style::Tuple => {
+            quote! {
+                Self(#(#field_names),*)
+            }
+        }
+        Style::Unit => quote!(Self),
+    };
+    quote! {
+        #[allow(unused_assignments)]
+        async move {
+            let mut first__ = true;
+            #(#read_fields)*
+            Ok(Some(#construct))
+        }
+    }
+}
+
+fn nix_deserialize_variant(variant: &Variant<'_>) -> TokenStream {
+    let ident = variant.ident;
+    let read_fields = variant.fields.iter().map(|f| {
+        let field = f.var_ident();
+        let ty = f.ty;
+        let read_value = quote_spanned! {
+            ty.span()=> if first__ {
+                first__ = false;
+                if let Some(v) = reader.try_read_value::<#ty>().await? {
+                    v
+                } else {
+                    return Ok(None);
+                }
+            } else {
+                reader.read_value::<#ty>().await?
+            }
+        };
+        if let Some(version) = f.attrs.version.as_ref() {
+            let default = match &f.attrs.default {
+                Default::Default => quote_spanned!(ty.span()=>::std::default::Default::default),
+                Default::Path(path) => path.to_token_stream(),
+                _ => panic!("No default for versioned field"),
+            };
+            quote! {
+                let #field : #ty = if (#version).contains(&reader.version().minor()) {
+                    #read_value
+                } else {
+                    #default()
+                };
+            }
+        } else {
+            quote! {
+                let #field : #ty = #read_value;
+            }
+        }
+    });
+    let field_names = variant.fields.iter().map(|f| f.var_ident());
+    let construct = match variant.style {
+        Style::Struct => {
+            quote! {
+                Self::#ident { #(#field_names),* }
+            }
+        }
+        Style::Tuple => {
+            quote! {
+                Self::#ident(#(#field_names),*)
+            }
+        }
+        Style::Unit => quote!(Self::#ident),
+    };
+    let version = &variant.attrs.version;
+    quote! {
+        #version => {
+            #(#read_fields)*
+            Ok(Some(#construct))
+        }
+    }
+}
+
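+/// Generates the deserialize body for an enum: a single
+/// `match reader.version().minor()` whose arms are the variants' declared
+/// version ranges. The match has no wildcard arm, so ranges that do not cover
+/// all of `u8` fail to compile (see the `deserialize_enum_non_exaustive` UI
+/// test).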
+fn nix_deserialize_enum(variants: &[Variant<'_>]) -> TokenStream {
+    let match_variant = variants.iter().map(nix_deserialize_variant);
+    quote! {
+        #[allow(unused_assignments)]
+        async move {
+            let mut first__ = true;
+            match reader.version().minor() {
+                #(#match_variant)*
+            }
+        }
+    }
+}
+
+fn nix_deserialize_from(ty: &Type) -> TokenStream {
+    quote_spanned! {
+        ty.span() =>
+        async move {
+            if let Some(value) = reader.try_read_value::<#ty>().await? {
+                Ok(Some(<Self as ::std::convert::From<#ty>>::from(value)))
+            } else {
+                Ok(None)
+            }
+        }
+    }
+}
+
+fn nix_deserialize_try_from(crate_path: &Path, ty: &Type) -> TokenStream {
+    quote_spanned! {
+        ty.span() =>
+        async move {
+            use #crate_path::nix_daemon::de::Error;
+            if let Some(item) = reader.try_read_value::<#ty>().await? {
+                <Self as ::std::convert::TryFrom<#ty>>::try_from(item)
+                    .map_err(Error::invalid_data)
+                    .map(Some)
+            } else {
+                Ok(None)
+            }
+        }
+    }
+}
+
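+/// Generates the `from_str` body: read the raw bytes, decode them as UTF-8,
+/// then delegate to `FromStr`, routing both decode and parse errors through
+/// `Error::invalid_data`.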
+fn nix_deserialize_from_str(crate_path: &Path, span: Span) -> TokenStream {
+    quote_spanned! {
+        span =>
+        async move {
+            use #crate_path::nix_daemon::de::Error;
+            if let Some(buf) = reader.try_read_bytes().await? {
+                let s = ::std::str::from_utf8(&buf)
+                    .map_err(Error::invalid_data)?;
+                <Self as ::std::str::FromStr>::from_str(s)
+                    .map_err(Error::invalid_data)
+                    .map(Some)
+            } else {
+                Ok(None)
+            }
+        }
+    }
+}
diff --git a/tvix/nix-compat-derive/src/internal/attrs.rs b/tvix/nix-compat-derive/src/internal/attrs.rs
new file mode 100644
index 000000000000..dbc959d1e917
--- /dev/null
+++ b/tvix/nix-compat-derive/src/internal/attrs.rs
@@ -0,0 +1,358 @@
+use quote::ToTokens;
+use syn::meta::ParseNestedMeta;
+use syn::parse::Parse;
+use syn::{parse_quote, Attribute, Expr, ExprLit, ExprPath, Lit, Token};
+
+use super::symbol::{Symbol, CRATE, DEFAULT, FROM, FROM_STR, NIX, TRY_FROM, VERSION};
+use super::Context;
+
+#[derive(Debug, PartialEq, Eq)]
+pub enum Default {
+    None,
+    #[allow(clippy::enum_variant_names)]
+    Default,
+    Path(ExprPath),
+}
+
+impl Default {
+    pub fn is_none(&self) -> bool {
+        matches!(self, Default::None)
+    }
+}
+
+#[derive(Debug, PartialEq, Eq)]
+pub struct Field {
+    pub default: Default,
+    pub version: Option<syn::ExprRange>,
+}
+
+impl Field {
+    pub fn from_ast(ctx: &Context, attrs: &[Attribute]) -> Field {
+        let mut version = None;
+        let mut default = Default::None;
+        for attr in attrs {
+            if attr.path() != NIX {
+                continue;
+            }
+            if let Err(err) = attr.parse_nested_meta(|meta| {
+                if meta.path == VERSION {
+                    version = parse_lit(ctx, &meta, VERSION)?;
+                } else if meta.path == DEFAULT {
+                    if meta.input.peek(Token![=]) {
+                        if let Some(path) = parse_lit(ctx, &meta, DEFAULT)? {
+                            default = Default::Path(path);
+                        }
+                    } else {
+                        default = Default::Default;
+                    }
+                } else {
+                    let path = meta.path.to_token_stream().to_string();
+                    return Err(meta.error(format_args!("unknown nix field attribute '{}'", path)));
+                }
+                Ok(())
+            }) {
+                ctx.syn_error(err);
+            }
+        }
+        if version.is_some() && default.is_none() {
+            default = Default::Default;
+        }
+
+        Field { default, version }
+    }
+}
+
+#[derive(Debug, PartialEq, Eq)]
+pub struct Variant {
+    pub version: syn::ExprRange,
+}
+
+impl Variant {
+    pub fn from_ast(ctx: &Context, attrs: &[Attribute]) -> Variant {
+        let mut version = parse_quote!(..);
+        for attr in attrs {
+            if attr.path() != NIX {
+                continue;
+            }
+            if let Err(err) = attr.parse_nested_meta(|meta| {
+                if meta.path == VERSION {
+                    if let Some(v) = parse_lit(ctx, &meta, VERSION)? {
+                        version = v;
+                    }
+                } else {
+                    let path = meta.path.to_token_stream().to_string();
+                    return Err(
+                        meta.error(format_args!("unknown nix variant attribute '{}'", path))
+                    );
+                }
+                Ok(())
+            }) {
+                ctx.syn_error(err);
+            }
+        }
+
+        Variant { version }
+    }
+}
+
+#[derive(Debug, PartialEq, Eq)]
+pub struct Container {
+    pub from_str: Option<syn::Path>,
+    pub type_from: Option<syn::Type>,
+    pub type_try_from: Option<syn::Type>,
+    pub crate_path: Option<syn::Path>,
+}
+
+impl Container {
+    pub fn from_ast(ctx: &Context, attrs: &[Attribute]) -> Container {
+        let mut type_from = None;
+        let mut type_try_from = None;
+        let mut crate_path = None;
+        let mut from_str = None;
+
+        for attr in attrs {
+            if attr.path() != NIX {
+                continue;
+            }
+            if let Err(err) = attr.parse_nested_meta(|meta| {
+                if meta.path == FROM {
+                    type_from = parse_lit(ctx, &meta, FROM)?;
+                } else if meta.path == TRY_FROM {
+                    type_try_from = parse_lit(ctx, &meta, TRY_FROM)?;
+                } else if meta.path == FROM_STR {
+                    from_str = Some(meta.path);
+                } else if meta.path == CRATE {
+                    crate_path = parse_lit(ctx, &meta, CRATE)?;
+                } else {
+                    let path = meta.path.to_token_stream().to_string();
+                    return Err(
+                        meta.error(format_args!("unknown nix variant attribute '{}'", path))
+                    );
+                }
+                Ok(())
+            }) {
+                ctx.syn_error(err);
+            }
+        }
+
+        Container {
+            from_str,
+            type_from,
+            type_try_from,
+            crate_path,
+        }
+    }
+}
+
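+/// Extracts the string literal from a `name = "value"` nix attribute,
+/// unwrapping any `Expr::Group` nesting introduced by macro expansion.
+/// Records an error on the context and returns `Ok(None)` when the value is
+/// not a string literal.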
+pub fn get_lit_str(
+    ctx: &Context,
+    meta: &ParseNestedMeta,
+    attr: Symbol,
+) -> syn::Result<Option<syn::LitStr>> {
+    let expr: Expr = meta.value()?.parse()?;
+    let mut value = &expr;
+    while let Expr::Group(e) = value {
+        value = &e.expr;
+    }
+    if let Expr::Lit(ExprLit {
+        lit: Lit::Str(s), ..
+    }) = value
+    {
+        Ok(Some(s.clone()))
+    } else {
+        ctx.error_spanned(
+            expr,
+            format_args!("expected nix attribute {} to be string", attr),
+        );
+        Ok(None)
+    }
+}
+
+pub fn parse_lit<T: Parse>(
+    ctx: &Context,
+    meta: &ParseNestedMeta,
+    attr: Symbol,
+) -> syn::Result<Option<T>> {
+    match get_lit_str(ctx, meta, attr)? {
+        Some(lit) => Ok(Some(lit.parse()?)),
+        None => Ok(None),
+    }
+}
+
+#[cfg(test)]
+mod test {
+    use syn::{parse_quote, Attribute};
+
+    use crate::internal::Context;
+
+    use super::*;
+
+    #[test]
+    fn parse_field_version() {
+        let attrs: Vec<Attribute> = vec![parse_quote!(#[nix(version="..34")])];
+        let ctx = Context::new();
+        let field = Field::from_ast(&ctx, &attrs);
+        ctx.check().unwrap();
+        assert_eq!(
+            field,
+            Field {
+                default: Default::Default,
+                version: Some(parse_quote!(..34)),
+            }
+        );
+    }
+
+    #[test]
+    fn parse_field_default() {
+        let attrs: Vec<Attribute> = vec![parse_quote!(#[nix(default)])];
+        let ctx = Context::new();
+        let field = Field::from_ast(&ctx, &attrs);
+        ctx.check().unwrap();
+        assert_eq!(
+            field,
+            Field {
+                default: Default::Default,
+                version: None,
+            }
+        );
+    }
+
+    #[test]
+    fn parse_field_default_path() {
+        let attrs: Vec<Attribute> = vec![parse_quote!(#[nix(default="Default::default")])];
+        let ctx = Context::new();
+        let field = Field::from_ast(&ctx, &attrs);
+        ctx.check().unwrap();
+        assert_eq!(
+            field,
+            Field {
+                default: Default::Path(parse_quote!(Default::default)),
+                version: None,
+            }
+        );
+    }
+
+    #[test]
+    fn parse_field_both() {
+        let attrs: Vec<Attribute> =
+            vec![parse_quote!(#[nix(version="..", default="Default::default")])];
+        let ctx = Context::new();
+        let field = Field::from_ast(&ctx, &attrs);
+        ctx.check().unwrap();
+        assert_eq!(
+            field,
+            Field {
+                default: Default::Path(parse_quote!(Default::default)),
+                version: Some(parse_quote!(..)),
+            }
+        );
+    }
+
+    #[test]
+    fn parse_field_both_rev() {
+        let attrs: Vec<Attribute> =
+            vec![parse_quote!(#[nix(default="Default::default", version="..")])];
+        let ctx = Context::new();
+        let field = Field::from_ast(&ctx, &attrs);
+        ctx.check().unwrap();
+        assert_eq!(
+            field,
+            Field {
+                default: Default::Path(parse_quote!(Default::default)),
+                version: Some(parse_quote!(..)),
+            }
+        );
+    }
+
+    #[test]
+    fn parse_field_no_attr() {
+        let attrs: Vec<Attribute> = vec![];
+        let ctx = Context::new();
+        let field = Field::from_ast(&ctx, &attrs);
+        ctx.check().unwrap();
+        assert_eq!(
+            field,
+            Field {
+                default: Default::None,
+                version: None,
+            }
+        );
+    }
+
+    #[test]
+    fn parse_field_no_subattrs() {
+        let attrs: Vec<Attribute> = vec![parse_quote!(#[nix()])];
+        let ctx = Context::new();
+        let field = Field::from_ast(&ctx, &attrs);
+        ctx.check().unwrap();
+        assert_eq!(
+            field,
+            Field {
+                default: Default::None,
+                version: None,
+            }
+        );
+    }
+
+    #[test]
+    fn parse_variant_version() {
+        let attrs: Vec<Attribute> = vec![parse_quote!(#[nix(version="..34")])];
+        let ctx = Context::new();
+        let variant = Variant::from_ast(&ctx, &attrs);
+        ctx.check().unwrap();
+        assert_eq!(
+            variant,
+            Variant {
+                version: parse_quote!(..34),
+            }
+        );
+    }
+
+    #[test]
+    fn parse_variant_no_attr() {
+        let attrs: Vec<Attribute> = vec![];
+        let ctx = Context::new();
+        let variant = Variant::from_ast(&ctx, &attrs);
+        ctx.check().unwrap();
+        assert_eq!(
+            variant,
+            Variant {
+                version: parse_quote!(..),
+            }
+        );
+    }
+
+    #[test]
+    fn parse_variant_no_subattrs() {
+        let attrs: Vec<Attribute> = vec![parse_quote!(#[nix()])];
+        let ctx = Context::new();
+        let variant = Variant::from_ast(&ctx, &attrs);
+        ctx.check().unwrap();
+        assert_eq!(
+            variant,
+            Variant {
+                version: parse_quote!(..),
+            }
+        );
+    }
+
+    #[test]
+    fn parse_container_try_from() {
+        let attrs: Vec<Attribute> = vec![parse_quote!(#[nix(try_from="u64")])];
+        let ctx = Context::new();
+        let container = Container::from_ast(&ctx, &attrs);
+        ctx.check().unwrap();
+        assert_eq!(
+            container,
+            Container {
+                from_str: None,
+                type_from: None,
+                type_try_from: Some(parse_quote!(u64)),
+                crate_path: None,
+            }
+        );
+    }
+}
diff --git a/tvix/nix-compat-derive/src/internal/ctx.rs b/tvix/nix-compat-derive/src/internal/ctx.rs
new file mode 100644
index 000000000000..ba770e044bc2
--- /dev/null
+++ b/tvix/nix-compat-derive/src/internal/ctx.rs
@@ -0,0 +1,50 @@
+use std::cell::RefCell;
+use std::fmt;
+use std::thread::panicking;
+
+use quote::ToTokens;
+
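+/// Accumulates errors during attribute parsing so all of them can be reported
+/// in a single compiler pass instead of aborting at the first one (the same
+/// pattern as serde_derive's `Ctxt`). `check()` must be called before drop;
+/// the `Drop` impl below panics otherwise.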
+pub struct Context {
+    errors: RefCell<Option<Vec<syn::Error>>>,
+}
+
+impl Context {
+    pub fn new() -> Context {
+        Context {
+            errors: RefCell::new(Some(Vec::new())),
+        }
+    }
+
+    pub fn syn_error(&self, error: syn::Error) {
+        self.errors
+            .borrow_mut()
+            .as_mut()
+            .take()
+            .unwrap()
+            .push(error);
+    }
+
+    pub fn error_spanned<T: ToTokens, D: fmt::Display>(&self, tokens: T, message: D) {
+        self.syn_error(syn::Error::new_spanned(tokens, message));
+    }
+
+    pub fn check(&self) -> syn::Result<()> {
+        let mut iter = self.errors.borrow_mut().take().unwrap().into_iter();
+        let mut err = match iter.next() {
+            None => return Ok(()),
+            Some(err) => err,
+        };
+        for next_err in iter {
+            err.combine(next_err);
+        }
+        Err(err)
+    }
+}
+
+impl Drop for Context {
+    fn drop(&mut self) {
+        if self.errors.borrow().is_some() && !panicking() {
+            panic!("Context dropped without checking errors");
+        }
+    }
+}
diff --git a/tvix/nix-compat-derive/src/internal/inputs.rs b/tvix/nix-compat-derive/src/internal/inputs.rs
new file mode 100644
index 000000000000..097a141a5d7c
--- /dev/null
+++ b/tvix/nix-compat-derive/src/internal/inputs.rs
@@ -0,0 +1,110 @@
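+/// The parsed input of `nix_deserialize_remote!`: zero or more outer
+/// attributes followed by the remote type, e.g. `#[nix(from = "u64")] MyType`.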
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct RemoteInput {
+    pub attrs: Vec<syn::Attribute>,
+    pub ident: syn::Type,
+}
+
+impl syn::parse::Parse for RemoteInput {
+    fn parse(input: syn::parse::ParseStream) -> syn::Result<Self> {
+        let attrs = input.call(syn::Attribute::parse_outer)?;
+
+        let ident = input.parse::<syn::Type>()?;
+        Ok(RemoteInput { attrs, ident })
+    }
+}
+
+impl quote::ToTokens for RemoteInput {
+    fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
+        fn is_outer(attr: &&syn::Attribute) -> bool {
+            match attr.style {
+                syn::AttrStyle::Outer => true,
+                syn::AttrStyle::Inner(_) => false,
+            }
+        }
+        for attr in self.attrs.iter().filter(is_outer) {
+            attr.to_tokens(tokens);
+        }
+        self.ident.to_tokens(tokens);
+    }
+}
+
+#[cfg(test)]
+mod test {
+    use syn::parse_quote;
+
+    use super::*;
+
+    #[test]
+    fn test_input() {
+        let p: RemoteInput = parse_quote!(u64);
+        assert_eq!(
+            p,
+            RemoteInput {
+                attrs: vec![],
+                ident: parse_quote!(u64),
+            }
+        );
+    }
+
+    #[test]
+    fn test_input_attr() {
+        let p: RemoteInput = parse_quote!(
+            #[nix]
+            u64
+        );
+        assert_eq!(
+            p,
+            RemoteInput {
+                attrs: vec![parse_quote!(#[nix])],
+                ident: parse_quote!(u64),
+            }
+        );
+    }
+
+    #[test]
+    fn test_input_attr_multiple() {
+        let p: RemoteInput = parse_quote!(
+            #[nix]
+            #[hello]
+            u64
+        );
+        assert_eq!(
+            p,
+            RemoteInput {
+                attrs: vec![parse_quote!(#[nix]), parse_quote!(#[hello])],
+                ident: parse_quote!(u64),
+            }
+        );
+    }
+
+    #[test]
+    fn test_input_attr_full() {
+        let p: RemoteInput = parse_quote!(
+            #[nix(try_from = "u64")]
+            usize
+        );
+        assert_eq!(
+            p,
+            RemoteInput {
+                attrs: vec![parse_quote!(#[nix(try_from="u64")])],
+                ident: parse_quote!(usize),
+            }
+        );
+    }
+
+    #[test]
+    fn test_input_attr_other() {
+        let p: RemoteInput = parse_quote!(
+            #[muh]
+            u64
+        );
+        assert_eq!(
+            p,
+            RemoteInput {
+                attrs: vec![parse_quote!(#[muh])],
+                ident: parse_quote!(u64),
+            }
+        );
+    }
+}
diff --git a/tvix/nix-compat-derive/src/internal/mod.rs b/tvix/nix-compat-derive/src/internal/mod.rs
new file mode 100644
index 000000000000..20b243221619
--- /dev/null
+++ b/tvix/nix-compat-derive/src/internal/mod.rs
@@ -0,0 +1,183 @@
+use syn::punctuated::Punctuated;
+use syn::spanned::Spanned;
+use syn::Token;
+
+pub mod attrs;
+mod ctx;
+pub mod inputs;
+mod symbol;
+
+pub use ctx::Context;
+
+pub struct Field<'a> {
+    pub member: syn::Member,
+    pub ty: &'a syn::Type,
+    pub attrs: attrs::Field,
+    pub original: &'a syn::Field,
+}
+
+impl<'a> Field<'a> {
+    pub fn from_ast(ctx: &Context, idx: usize, field: &'a syn::Field) -> Field<'a> {
+        let attrs = attrs::Field::from_ast(ctx, &field.attrs);
+        let member = match &field.ident {
+            Some(id) => syn::Member::Named(id.clone()),
+            None => syn::Member::Unnamed(idx.into()),
+        };
+        Field {
+            member,
+            attrs,
+            ty: &field.ty,
+            original: field,
+        }
+    }
+
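+    /// Returns the local binding used for this field in generated code: the
+    /// field's own name for named fields, or a synthesized `field0`,
+    /// `field1`, ... for tuple fields.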
+    pub fn var_ident(&self) -> syn::Ident {
+        match &self.member {
+            syn::Member::Named(name) => name.clone(),
+            syn::Member::Unnamed(idx) => {
+                syn::Ident::new(&format!("field{}", idx.index), self.original.span())
+            }
+        }
+    }
+}
+
+pub struct Variant<'a> {
+    pub ident: &'a syn::Ident,
+    pub attrs: attrs::Variant,
+    pub style: Style,
+    pub fields: Vec<Field<'a>>,
+    //pub original: &'a syn::Variant,
+}
+
+impl<'a> Variant<'a> {
+    pub fn from_ast(ctx: &Context, variant: &'a syn::Variant) -> Self {
+        let attrs = attrs::Variant::from_ast(ctx, &variant.attrs);
+        let (style, fields) = match &variant.fields {
+            syn::Fields::Named(fields) => (Style::Struct, fields_ast(ctx, &fields.named)),
+            syn::Fields::Unnamed(fields) => (Style::Tuple, fields_ast(ctx, &fields.unnamed)),
+            syn::Fields::Unit => (Style::Unit, Vec::new()),
+        };
+        Variant {
+            ident: &variant.ident,
+            attrs,
+            style,
+            fields,
+            //original: variant,
+        }
+    }
+}
+
+#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy)]
+pub enum Style {
+    Struct,
+    Tuple,
+    Unit,
+}
+
+pub enum Data<'a> {
+    Enum(Vec<Variant<'a>>),
+    Struct(Style, Vec<Field<'a>>),
+}
+
+pub struct Container<'a> {
+    pub ident: &'a syn::Ident,
+    pub attrs: attrs::Container,
+    pub data: Data<'a>,
+    pub crate_path: syn::Path,
+    pub original: &'a syn::DeriveInput,
+}
+
+impl<'a> Container<'a> {
+    pub fn from_ast(
+        ctx: &Context,
+        crate_path: syn::Path,
+        input: &'a mut syn::DeriveInput,
+    ) -> Option<Container<'a>> {
+        let attrs = attrs::Container::from_ast(ctx, &input.attrs);
+        let data = match &input.data {
+            syn::Data::Struct(s) => match &s.fields {
+                syn::Fields::Named(fields) => {
+                    Data::Struct(Style::Struct, fields_ast(ctx, &fields.named))
+                }
+                syn::Fields::Unnamed(fields) => {
+                    Data::Struct(Style::Tuple, fields_ast(ctx, &fields.unnamed))
+                }
+                syn::Fields::Unit => Data::Struct(Style::Unit, Vec::new()),
+            },
+            syn::Data::Enum(e) => {
+                let variants = e
+                    .variants
+                    .iter()
+                    .map(|variant| Variant::from_ast(ctx, variant))
+                    .collect();
+                Data::Enum(variants)
+            }
+            syn::Data::Union(u) => {
+                ctx.error_spanned(u.union_token, "Union not supported by nixrs");
+                return None;
+            }
+        };
+        Some(Container {
+            ident: &input.ident,
+            attrs,
+            data,
+            crate_path,
+            original: input,
+        })
+    }
+
+    pub fn crate_path(&self) -> &syn::Path {
+        if let Some(crate_path) = self.attrs.crate_path.as_ref() {
+            crate_path
+        } else {
+            &self.crate_path
+        }
+    }
+
+    pub fn ident_type(&self) -> syn::Type {
+        let path: syn::Path = self.ident.clone().into();
+        let tp = syn::TypePath { qself: None, path };
+        tp.into()
+    }
+}
+
+pub struct Remote<'a> {
+    pub attrs: attrs::Container,
+    pub ty: &'a syn::Type,
+    pub crate_path: syn::Path,
+}
+
+impl<'a> Remote<'a> {
+    pub fn from_ast(
+        ctx: &Context,
+        crate_path: syn::Path,
+        input: &'a inputs::RemoteInput,
+    ) -> Option<Remote<'a>> {
+        let attrs = attrs::Container::from_ast(ctx, &input.attrs);
+        if attrs.from_str.is_none() && attrs.type_from.is_none() && attrs.type_try_from.is_none() {
+            ctx.error_spanned(input, "Missing from_str, from or try_from attribute");
+            return None;
+        }
+        Some(Remote {
+            ty: &input.ident,
+            attrs,
+            crate_path,
+        })
+    }
+
+    pub fn crate_path(&self) -> &syn::Path {
+        if let Some(crate_path) = self.attrs.crate_path.as_ref() {
+            crate_path
+        } else {
+            &self.crate_path
+        }
+    }
+}
+
+fn fields_ast<'a>(ctx: &Context, fields: &'a Punctuated<syn::Field, Token![,]>) -> Vec<Field<'a>> {
+    fields
+        .iter()
+        .enumerate()
+        .map(|(idx, field)| Field::from_ast(ctx, idx, field))
+        .collect()
+}
diff --git a/tvix/nix-compat-derive/src/internal/symbol.rs b/tvix/nix-compat-derive/src/internal/symbol.rs
new file mode 100644
index 000000000000..ed3fe304eb5d
--- /dev/null
+++ b/tvix/nix-compat-derive/src/internal/symbol.rs
@@ -0,0 +1,32 @@
+use std::fmt;
+
+use syn::Path;
+
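+/// A known `#[nix(...)]` attribute key. Comparing `syn::Path`s against these
+/// constants (via the `PartialEq` impls below) keeps the attribute parsers
+/// free of stray string literals.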
+#[derive(Copy, Clone)]
+pub struct Symbol(&'static str);
+
+pub const NIX: Symbol = Symbol("nix");
+pub const VERSION: Symbol = Symbol("version");
+pub const DEFAULT: Symbol = Symbol("default");
+pub const FROM: Symbol = Symbol("from");
+pub const TRY_FROM: Symbol = Symbol("try_from");
+pub const FROM_STR: Symbol = Symbol("from_str");
+pub const CRATE: Symbol = Symbol("crate");
+
+impl PartialEq<Symbol> for Path {
+    fn eq(&self, word: &Symbol) -> bool {
+        self.is_ident(word.0)
+    }
+}
+
+impl<'a> PartialEq<Symbol> for &'a Path {
+    fn eq(&self, word: &Symbol) -> bool {
+        self.is_ident(word.0)
+    }
+}
+
+impl fmt::Display for Symbol {
+    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+        formatter.write_str(self.0)
+    }
+}
diff --git a/tvix/nix-compat-derive/src/lib.rs b/tvix/nix-compat-derive/src/lib.rs
new file mode 100644
index 000000000000..e3faed02ebab
--- /dev/null
+++ b/tvix/nix-compat-derive/src/lib.rs
@@ -0,0 +1,303 @@
+//! # Using derive
+//!
+//! 1. [Overview](#overview)
+//! 2. [Attributes](#attributes)
+//!     1. [Container attributes](#container-attributes)
+//!         1. [`#[nix(from_str)]`](#nixfrom_str)
+//!         2. [`#[nix(from = "FromType")]`](#nixfrom--fromtype)
+//!         3. [`#[nix(try_from = "FromType")]`](#nixtry_from--fromtype)
+//!         4. [`#[nix(crate = "...")]`](#nixcrate--)
+//!     2. [Variant attributes](#variant-attributes)
+//!         1. [`#[nix(version = "range")]`](#nixversion--range)
+//!     3. [Field attributes](#field-attributes)
+//!         1. [`#[nix(version = "range")]`](#nixversion--range-1)
+//!         2. [`#[nix(default)]`](#nixdefault)
+//!         3. [`#[nix(default = "path")]`](#nixdefault--path)
+//!
+//! ## Overview
+//!
+//! This crate contains derive macros and function-like macros for implementing
+//! `NixDeserialize` with less boilerplate.
+//!
+//! ### Examples
+//! ```rust
+//! # use nix_compat_derive::NixDeserialize;
+//! #
+//! #[derive(NixDeserialize)]
+//! struct Unnamed(u64, String);
+//! ```
+//!
+//! ```rust
+//! # use nix_compat_derive::NixDeserialize;
+//! #
+//! #[derive(NixDeserialize)]
+//! struct Fields {
+//!     number: u64,
+//!     message: String,
+//! }
+//! ```
+//!
+//! ```rust
+//! # use nix_compat_derive::NixDeserialize;
+//! #
+//! #[derive(NixDeserialize)]
+//! struct Ignored;
+//! ```
+//!
+//! ## Attributes
+//!
+//! To customize the derived trait implementations you can add
+//! [attributes](https://doc.rust-lang.org/reference/attributes.html)
+//! to containers, fields and variants.
+//!
+//! ```rust
+//! # use nix_compat_derive::NixDeserialize;
+//! #
+//! #[derive(NixDeserialize)]
+//! #[nix(crate="nix_compat")] // <-- This is a container attribute
+//! struct Fields {
+//!     number: u64,
+//!     #[nix(version="..20")] // <-- This is a field attribute
+//!     message: String,
+//! }
+//!
+//! #[derive(NixDeserialize)]
+//! #[nix(crate="nix_compat")] // <-- This is also a container attribute
+//! enum E {
+//!     #[nix(version="..=9")] // <-- This is a variant attribute
+//!     A(u64),
+//!     #[nix(version="10..")] // <-- This is also a variant attribute
+//!     B(String),
+//! }
+//! ```
+//!
+//! ### Container attributes
+//!
+//! ##### `#[nix(from_str)]`
+//!
+//! When `from_str` is specified the fields are all ignored and instead a
+//! `String` is first deserialized and then `FromStr::from_str` is used
+//! to convert this `String` to the container type.
+//!
+//! This means that the container must implement `FromStr` and the error
+//! returned from `from_str` must implement `Display`.
+//!
+//! ###### Example
+//!
+//! ```rust
+//! # use nix_compat_derive::NixDeserialize;
+//! #
+//! #[derive(NixDeserialize)]
+//! #[nix(from_str)]
+//! struct MyString(String);
+//! impl std::str::FromStr for MyString {
+//!     type Err = String;
+//!     fn from_str(s: &str) -> Result<Self, Self::Err> {
+//!         if s != "bad string" {
+//!             Ok(MyString(s.to_string()))
+//!         } else {
+//!             Err("Got a bad string".to_string())
+//!         }
+//!     }
+//! }
+//! ```
+//!
+//! ##### `#[nix(from = "FromType")]`
+//!
+//! When `from` is specified the fields are all ignored and instead a
+//! value of `FromType` is first deserialized and then `From::from` is
+//! used to convert from this value to the container type.
+//!
+//! This means that the container must implement `From<FromType>` and
+//! `FromType` must implement `NixDeserialize`.
+//!
+//! ###### Example
+//!
+//! ```rust
+//! # use nix_compat_derive::NixDeserialize;
+//! #
+//! #[derive(NixDeserialize)]
+//! #[nix(from="usize")]
+//! struct MyValue(usize);
+//! impl From<usize> for MyValue {
+//!     fn from(val: usize) -> Self {
+//!         MyValue(val)
+//!     }
+//! }
+//! ```
+//!
+//! ##### `#[nix(try_from = "FromType")]`
+//!
+//! With `try_from` a value of `FromType` is first deserialized and then
+//! `TryFrom::try_from` is used to convert from this value to the container
+//! type.
+//!
+//! This means that the container must implement `TryFrom<FromType>` and
+//! `FromType` must implement `NixDeserialize`.
+//! The error returned from `try_from` also needs to implement `Display`.
+//!
+//! ###### Example
+//!
+//! ```rust
+//! # use nix_compat_derive::NixDeserialize;
+//! #
+//! #[derive(NixDeserialize)]
+//! #[nix(try_from="usize")]
+//! struct WrongAnswer(usize);
+//! impl TryFrom<usize> for WrongAnswer {
+//!     type Error = String;
+//!     fn try_from(val: usize) -> Result<Self, Self::Error> {
+//!         if val != 42 {
+//!             Ok(WrongAnswer(val))
+//!         } else {
+//!             Err("Got the answer to life the universe and everything".to_string())
+//!         }
+//!     }
+//! }
+//! ```
+//!
+//! ##### `#[nix(crate = "...")]`
+//!
+//! Specify the path to the `nix-compat` crate instance to use when referring
+//! to the API in the generated code. This is usually not needed.
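+//!
+//! ###### Example
+//!
+//! A minimal sketch; `"nix_compat"` below stands in for wherever the crate
+//! sits in your dependency graph:
+//!
+//! ```rust
+//! # use nix_compat_derive::NixDeserialize;
+//! #
+//! #[derive(NixDeserialize)]
+//! #[nix(crate = "nix_compat")]
+//! struct Count(u64);
+//! ```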
+//!
+//! ### Variant attributes
+//!
+//! ##### `#[nix(version = "range")]`
+//!
+//! Specifies the protocol version range where this variant is used.
+//! When deriving an enum, the `version` attribute is used to select which
+//! variant of the enum to deserialize. The range refers to the minor
+//! version, and the ranges of all variants combined should cover all
+//! versions without overlap; where ranges do overlap, the first variant
+//! that matches is selected.
+//!
+//! ###### Example
+//!
+//! ```rust
+//! # use nix_compat_derive::NixDeserialize;
+//! #[derive(NixDeserialize)]
+//! enum Testing {
+//!     #[nix(version="..=18")]
+//!     OldVersion(u64),
+//!     #[nix(version="19..")]
+//!     NewVersion(String),
+//! }
+//! ```
+//!
+//! ### Field attributes
+//!
+//! ##### `#[nix(version = "range")]`
+//!
+//! Specifies the protocol version range where this field is included.
+//! The range refers to the minor version. For example `version = "..20"`
+//! includes the field in protocol versions `1.0` to `1.19` and skips
+//! it in version `1.20` and above.
+//!
+//! ###### Example
+//!
+//! ```rust
+//! # use nix_compat_derive::NixDeserialize;
+//! #
+//! #[derive(NixDeserialize)]
+//! struct Field {
+//!     number: u64,
+//!     #[nix(version="..20")]
+//!     message: String,
+//! }
+//! ```
+//!
+//! ##### `#[nix(default)]`
+//!
+//! When a field is skipped because the active protocol version falls
+//! outside the range specified in [`#[nix(version = "range")]`](#nixversion--range-1)
+//! this attribute indicates that `Default::default()` should be used
+//! to get a value for the field. This is also the default
+//! when you only specify [`#[nix(version = "range")]`](#nixversion--range-1).
+//!
+//! ###### Example
+//!
+//! ```rust
+//! # use nix_compat_derive::NixDeserialize;
+//! #
+//! #[derive(NixDeserialize)]
+//! struct Field {
+//!     number: u64,
+//!     #[nix(version="..20", default)]
+//!     message: String,
+//! }
+//! ```
+//!
+//! ##### `#[nix(default = "path")]`
+//!
+//! When a field is skipped because the active protocol version falls
+//! outside the range specified in [`#[nix(version = "range")]`](#nixversion--range-1)
+//! this attribute indicates that the function in `path` should be called to
+//! get a default value for the field. The given function must be callable
+//! as `fn() -> T`.
+//! For example `default = "my_value"` would call `my_value()` and `default =
+//! "AType::empty"` would call `AType::empty()`.
+//!
+//! ###### Example
+//!
+//! ```rust
+//! # use nix_compat_derive::NixDeserialize;
+//! #
+//! #[derive(NixDeserialize)]
+//! struct Field {
+//!     number: u64,
+//!     #[nix(version="..20", default="missing_string")]
+//!     message: String,
+//! }
+//!
+//! fn missing_string() -> String {
+//!     "missing string".to_string()
+//! }
+//! ```
+
+use internal::inputs::RemoteInput;
+use proc_macro::TokenStream;
+use syn::{parse_quote, DeriveInput};
+
+mod de;
+mod internal;
+
+#[proc_macro_derive(NixDeserialize, attributes(nix))]
+pub fn derive_nix_deserialize(item: TokenStream) -> TokenStream {
+    let mut input = syn::parse_macro_input!(item as DeriveInput);
+    let crate_path: syn::Path = parse_quote!(::nix_compat);
+    de::expand_nix_deserialize(crate_path, &mut input)
+        .unwrap_or_else(syn::Error::into_compile_error)
+        .into()
+}
+
+/// Macro to implement `NixDeserialize` on a type.
+/// Sometimes you can't use the derive macro to implement `NixDeserialize`
+/// (for example when dealing with types in the Rust standard library) but
+/// don't want to implement it yourself. This macro can be used in those
+/// situations where you would derive using `#[nix(from_str)]`,
+/// `#[nix(from = "FromType")]` or `#[nix(try_from = "FromType")]` if you
+/// could.
+///
+/// #### Example
+///
+/// ```rust
+/// # use nix_compat_derive::nix_deserialize_remote;
+/// #
+/// struct MyU64(u64);
+///
+/// impl From<u64> for MyU64 {
+///     fn from(value: u64) -> Self {
+///         Self(value)
+///     }
+/// }
+///
+/// nix_deserialize_remote!(#[nix(from="u64")] MyU64);
+/// ```
+#[proc_macro]
+pub fn nix_deserialize_remote(item: TokenStream) -> TokenStream {
+    let input = syn::parse_macro_input!(item as RemoteInput);
+    let crate_path = parse_quote!(::nix_compat);
+    de::expand_nix_deserialize_remote(crate_path, &input)
+        .unwrap_or_else(syn::Error::into_compile_error)
+        .into()
+}
diff --git a/tvix/nix-compat/Cargo.toml b/tvix/nix-compat/Cargo.toml
index 876ac3ecadd0..58137e4de2e1 100644
--- a/tvix/nix-compat/Cargo.toml
+++ b/tvix/nix-compat/Cargo.toml
@@ -4,48 +4,51 @@ version = "0.1.0"
 edition = "2021"
 
 [features]
-# async NAR writer
+# async NAR writer. Also needs the `wire` feature.
 async = ["tokio"]
 # code emitting low-level packets used in the daemon protocol.
-wire = ["tokio", "pin-project-lite"]
+wire = ["tokio", "pin-project-lite", "bytes"]
+test = []
 
 # Enable all features by default.
-default = ["async", "wire"]
+default = ["async", "wire", "nix-compat-derive"]
 
 [dependencies]
-bitflags = "2.4.1"
-bstr = { version = "1.6.0", features = ["alloc", "unicode", "serde"] }
-data-encoding = "2.3.3"
-ed25519 = "2.2.3"
-ed25519-dalek = "2.1.0"
-enum-primitive-derive = "0.3.0"
-glob = "0.3.0"
-nom = "7.1.3"
-num-traits = "0.2.18"
-serde = { version = "1.0", features = ["derive"] }
-serde_json = "1.0"
-sha2 = "0.10.6"
-thiserror = "1.0.38"
-
-[dependencies.tokio]
-optional = true
-version = "1.32.0"
-features = ["io-util", "macros"]
-
-[dependencies.pin-project-lite]
+bitflags = { workspace = true }
+bstr = { workspace = true, features = ["alloc", "unicode", "serde"] }
+data-encoding = { workspace = true }
+ed25519 = { workspace = true }
+ed25519-dalek = { workspace = true }
+enum-primitive-derive = { workspace = true }
+glob = { workspace = true }
+mimalloc = { workspace = true }
+nom = { workspace = true }
+num-traits = { workspace = true }
+serde = { workspace = true, features = ["derive"] }
+serde_json = { workspace = true }
+sha2 = { workspace = true }
+thiserror = { workspace = true }
+tracing = { workspace = true }
+bytes = { workspace = true, optional = true }
+tokio = { workspace = true, features = ["io-util", "macros"], optional = true }
+pin-project-lite = { workspace = true, optional = true }
+
+[dependencies.nix-compat-derive]
+path = "../nix-compat-derive"
 optional = true
-version = "0.2.13"
 
 [dev-dependencies]
-criterion = { version = "0.5", features = ["html_reports"] }
-futures = { version = "0.3.30", default-features = false, features = ["executor"] }
-hex-literal = "0.4.1"
-lazy_static = "1.4.0"
-pretty_assertions = "1.4.0"
-rstest = "0.19.0"
-serde_json = "1.0"
-tokio-test = "0.4.3"
-zstd = "^0.13.0"
+criterion = { workspace = true, features = ["html_reports"] }
+futures = { workspace = true }
+hex-literal = { workspace = true }
+lazy_static = { workspace = true }
+mimalloc = { workspace = true }
+pretty_assertions = { workspace = true }
+rstest = { workspace = true }
+serde_json = { workspace = true }
+smol_str = { workspace = true }
+tokio-test = { workspace = true }
+zstd = { workspace = true }
 
 [[bench]]
 name = "derivation_parse_aterm"
diff --git a/tvix/nix-compat/benches/derivation_parse_aterm.rs b/tvix/nix-compat/benches/derivation_parse_aterm.rs
index 4ace7d4480f4..6557dd17af37 100644
--- a/tvix/nix-compat/benches/derivation_parse_aterm.rs
+++ b/tvix/nix-compat/benches/derivation_parse_aterm.rs
@@ -1,8 +1,12 @@
 use std::path::Path;
 
 use criterion::{black_box, criterion_group, criterion_main, Criterion};
+use mimalloc::MiMalloc;
 use nix_compat::derivation::Derivation;
 
+#[global_allocator]
+static GLOBAL: MiMalloc = MiMalloc;
+
 const RESOURCES_PATHS: &str = "src/derivation/tests/derivation_tests/ok";
 
 fn bench_aterm_parser(c: &mut Criterion) {
diff --git a/tvix/nix-compat/benches/narinfo_parse.rs b/tvix/nix-compat/benches/narinfo_parse.rs
index 7ffd24d12bc3..f35ba8468a88 100644
--- a/tvix/nix-compat/benches/narinfo_parse.rs
+++ b/tvix/nix-compat/benches/narinfo_parse.rs
@@ -1,8 +1,12 @@
 use criterion::{black_box, criterion_group, criterion_main, Criterion, Throughput};
 use lazy_static::lazy_static;
+use mimalloc::MiMalloc;
 use nix_compat::narinfo::NarInfo;
 use std::{io, str};
 
+#[global_allocator]
+static GLOBAL: MiMalloc = MiMalloc;
+
 const SAMPLE: &str = r#"StorePath: /nix/store/1pajsq519irjy86vli20bgq1wr1q3pny-banking-0.3.0
 URL: nar/0rdn027rxqbl42bv9jxhsipgq2hwqdapvwmdzligmzdmz2p9vybs.nar.xz
 Compression: xz
diff --git a/tvix/nix-compat/build.rs b/tvix/nix-compat/build.rs
new file mode 100644
index 000000000000..c66b97016245
--- /dev/null
+++ b/tvix/nix-compat/build.rs
@@ -0,0 +1,5 @@
+fn main() {
+    // Pick up new test case files
+    // https://github.com/la10736/rstest/issues/256
+    println!("cargo:rerun-if-changed=src/derivation/tests/derivation_tests")
+}
diff --git a/tvix/nix-compat/default.nix b/tvix/nix-compat/default.nix
index 9df76e12fce1..34938e3d6428 100644
--- a/tvix/nix-compat/default.nix
+++ b/tvix/nix-compat/default.nix
@@ -1,7 +1,11 @@
-{ depot, ... }:
+{ depot, lib, ... }:
 
-depot.tvix.crates.workspaceMembers.nix-compat.build.override {
+(depot.tvix.crates.workspaceMembers.nix-compat.build.override {
   runTests = true;
-  # make sure we also enable async here, so run the tests behind that feature flag.
-  features = [ "default" "async" "wire" ];
-}
+}).overrideAttrs (old: rec {
+  meta.ci.targets = lib.filter (x: lib.hasPrefix "with-features" x || x == "no-features") (lib.attrNames passthru);
+  passthru = old.passthru // (depot.tvix.utils.mkFeaturePowerset {
+    inherit (old) crateName;
+    features = [ "async" "wire" ];
+  });
+})
diff --git a/tvix/nix-compat/src/aterm/mod.rs b/tvix/nix-compat/src/aterm/mod.rs
index 8806b6caf2e5..bb3b77bc7399 100644
--- a/tvix/nix-compat/src/aterm/mod.rs
+++ b/tvix/nix-compat/src/aterm/mod.rs
@@ -2,6 +2,6 @@ mod escape;
 mod parser;
 
 pub(crate) use escape::escape_bytes;
-pub(crate) use parser::parse_bstr_field;
-pub(crate) use parser::parse_str_list;
+pub(crate) use parser::parse_bytes_field;
 pub(crate) use parser::parse_string_field;
+pub(crate) use parser::parse_string_list;
diff --git a/tvix/nix-compat/src/aterm/parser.rs b/tvix/nix-compat/src/aterm/parser.rs
index a30cb40ab08d..a570573a8700 100644
--- a/tvix/nix-compat/src/aterm/parser.rs
+++ b/tvix/nix-compat/src/aterm/parser.rs
@@ -11,8 +11,10 @@ use nom::multi::separated_list0;
 use nom::sequence::delimited;
 use nom::IResult;
 
-/// Parse a bstr and undo any escaping.
-fn parse_escaped_bstr(i: &[u8]) -> IResult<&[u8], BString> {
+/// Parse a bstr and undo any escaping (which is why this needs to allocate).
+// FUTUREWORK: have a version for fields that are known to not need escaping
+// (like store paths), and use &str.
+fn parse_escaped_bytes(i: &[u8]) -> IResult<&[u8], BString> {
     escaped_transform(
         is_not("\"\\"),
         '\\',
@@ -29,14 +31,14 @@ fn parse_escaped_bstr(i: &[u8]) -> IResult<&[u8], BString> {
 
 /// Parse a field in double quotes, undo any escaping, and return the unquoted
 /// and decoded `Vec<u8>`.
-pub(crate) fn parse_bstr_field(i: &[u8]) -> IResult<&[u8], BString> {
+pub(crate) fn parse_bytes_field(i: &[u8]) -> IResult<&[u8], BString> {
     // inside double quotesโ€ฆ
     delimited(
         nomchar('\"'),
         // There is
         alt((
             // โ€ฆeither is a bstr after unescaping
-            parse_escaped_bstr,
+            parse_escaped_bytes,
             // โ€ฆor an empty string.
             map(tag(b""), |_| BString::default()),
         )),
@@ -45,8 +47,8 @@ pub(crate) fn parse_bstr_field(i: &[u8]) -> IResult<&[u8], BString> {
 }
 
 /// Parse a field in double quotes, undo any escaping, and return the unquoted
-/// and decoded string, if it's a valid string. Or fail parsing if the bytes are
-/// no valid UTF-8.
+/// and decoded [String], if it's valid UTF-8.
+/// Or fail parsing if the bytes are no valid UTF-8.
 pub(crate) fn parse_string_field(i: &[u8]) -> IResult<&[u8], String> {
     // inside double quotesโ€ฆ
     delimited(
@@ -54,18 +56,18 @@ pub(crate) fn parse_string_field(i: &[u8]) -> IResult<&[u8], String> {
         // There is
         alt((
             // either is a String after unescaping
-            nom::combinator::map_opt(parse_escaped_bstr, |escaped_bstr| {
-                String::from_utf8(escaped_bstr.into()).ok()
+            nom::combinator::map_opt(parse_escaped_bytes, |escaped_bytes| {
+                String::from_utf8(escaped_bytes.into()).ok()
             }),
             // or an empty string.
-            map(tag(b""), |_| String::new()),
+            map(tag(b""), |_| "".to_string()),
         )),
         nomchar('\"'),
     )(i)
 }
 
-/// Parse a list of of string fields (enclosed in brackets)
-pub(crate) fn parse_str_list(i: &[u8]) -> IResult<&[u8], Vec<String>> {
+/// Parse a list of string fields (enclosed in brackets)
+pub(crate) fn parse_string_list(i: &[u8]) -> IResult<&[u8], Vec<String>> {
     // inside brackets
     delimited(
         nomchar('['),
@@ -89,7 +91,7 @@ mod tests {
         #[case] expected: &[u8],
         #[case] exp_rest: &[u8],
     ) {
-        let (rest, parsed) = super::parse_bstr_field(input).expect("must parse");
+        let (rest, parsed) = super::parse_bytes_field(input).expect("must parse");
         assert_eq!(exp_rest, rest, "expected remainder");
         assert_eq!(expected, parsed);
     }
@@ -118,7 +120,7 @@ mod tests {
     #[case::empty_list(b"[]", vec![], b"")]
     #[case::empty_list_with_rest(b"[]blub", vec![], b"blub")]
     fn parse_list(#[case] input: &[u8], #[case] expected: Vec<String>, #[case] exp_rest: &[u8]) {
-        let (rest, parsed) = super::parse_str_list(input).expect("must parse");
+        let (rest, parsed) = super::parse_string_list(input).expect("must parse");
         assert_eq!(exp_rest, rest, "expected remainder");
         assert_eq!(expected, parsed);
     }
diff --git a/tvix/nix-compat/src/bin/drvfmt.rs b/tvix/nix-compat/src/bin/drvfmt.rs
index ddc1f0389f26..fca22c2cb2ae 100644
--- a/tvix/nix-compat/src/bin/drvfmt.rs
+++ b/tvix/nix-compat/src/bin/drvfmt.rs
@@ -3,6 +3,11 @@ use std::{collections::BTreeMap, io::Read};
 use nix_compat::derivation::Derivation;
 use serde_json::json;
 
+use mimalloc::MiMalloc;
+
+#[global_allocator]
+static GLOBAL: MiMalloc = MiMalloc;
+
 /// construct a serde_json::Value from a Derivation.
 /// Some environment values can be non-valid UTF-8 strings.
 /// `serde_json` prints them out really unreadable.
diff --git a/tvix/nix-compat/src/derivation/mod.rs b/tvix/nix-compat/src/derivation/mod.rs
index 6e12e3ea86e6..6baeaba38299 100644
--- a/tvix/nix-compat/src/derivation/mod.rs
+++ b/tvix/nix-compat/src/derivation/mod.rs
@@ -36,11 +36,11 @@ pub struct Derivation {
 
     /// Map from drv path to output names used from this derivation.
     #[serde(rename = "inputDrvs")]
-    pub input_derivations: BTreeMap<StorePath, BTreeSet<String>>,
+    pub input_derivations: BTreeMap<StorePath<String>, BTreeSet<String>>,
 
     /// Plain store paths of additional inputs.
     #[serde(rename = "inputSrcs")]
-    pub input_sources: BTreeSet<StorePath>,
+    pub input_sources: BTreeSet<StorePath<String>>,
 
     /// Maps output names to Output.
     pub outputs: BTreeMap<String, Output>,
@@ -127,7 +127,10 @@ impl Derivation {
     /// the `name` with a `.drv` suffix as name, all [Derivation::input_sources] and
     /// keys of [Derivation::input_derivations] as references, and the ATerm string of
     /// the [Derivation] as content.
-    pub fn calculate_derivation_path(&self, name: &str) -> Result<StorePath, DerivationError> {
+    pub fn calculate_derivation_path(
+        &self,
+        name: &str,
+    ) -> Result<StorePath<String>, DerivationError> {
         // append .drv to the name
         let name = &format!("{}.drv", name);
 
@@ -141,7 +144,6 @@ impl Derivation {
             .collect();
 
         build_text_path(name, self.to_aterm_bytes(), references)
-            .map(|s| s.to_owned())
             .map_err(|_e| DerivationError::InvalidOutputName(name.to_string()))
     }
 
@@ -210,7 +212,7 @@ impl Derivation {
                 self.input_derivations
                     .iter()
                     .map(|(drv_path, output_names)| {
-                        let hash = fn_lookup_hash_derivation_modulo(&drv_path.into());
+                        let hash = fn_lookup_hash_derivation_modulo(&drv_path.as_ref());
 
                         (hash, output_names.to_owned())
                     }),
diff --git a/tvix/nix-compat/src/derivation/output.rs b/tvix/nix-compat/src/derivation/output.rs
index 266617f587f8..0b81ef3c3155 100644
--- a/tvix/nix-compat/src/derivation/output.rs
+++ b/tvix/nix-compat/src/derivation/output.rs
@@ -1,5 +1,4 @@
 use crate::nixhash::CAHash;
-use crate::store_path::StorePathRef;
 use crate::{derivation::OutputError, store_path::StorePath};
 use serde::de::Unexpected;
 use serde::{Deserialize, Serialize};
@@ -10,7 +9,7 @@ use std::borrow::Cow;
 #[derive(Clone, Debug, Default, Eq, PartialEq, Serialize)]
 pub struct Output {
     /// Store path of build result.
-    pub path: Option<StorePath>,
+    pub path: Option<StorePath<String>>,
 
     #[serde(flatten)]
     pub ca_hash: Option<CAHash>, // we can only represent a subset here.
@@ -33,10 +32,10 @@ impl<'de> Deserialize<'de> for Output {
                 &"a string",
             ))?;
 
-        let path = StorePathRef::from_absolute_path(path.as_bytes())
+        let path = StorePath::from_absolute_path(path.as_bytes())
             .map_err(|_| serde::de::Error::invalid_value(Unexpected::Str(path), &"StorePath"))?;
         Ok(Self {
-            path: Some(path.to_owned()),
+            path: Some(path),
             ca_hash: CAHash::from_map::<D>(&fields)?,
         })
     }
diff --git a/tvix/nix-compat/src/derivation/parse_error.rs b/tvix/nix-compat/src/derivation/parse_error.rs
index fc97f1a9883b..f625d9aeb724 100644
--- a/tvix/nix-compat/src/derivation/parse_error.rs
+++ b/tvix/nix-compat/src/derivation/parse_error.rs
@@ -20,7 +20,7 @@ pub enum ErrorKind {
     DuplicateInputDerivationOutputName(String, String),
 
     #[error("duplicate input source: {0}")]
-    DuplicateInputSource(StorePath),
+    DuplicateInputSource(StorePath<String>),
 
     #[error("nix hash error: {0}")]
     NixHashError(nixhash::Error),
diff --git a/tvix/nix-compat/src/derivation/parser.rs b/tvix/nix-compat/src/derivation/parser.rs
index 2775294960fe..4fff7181ba40 100644
--- a/tvix/nix-compat/src/derivation/parser.rs
+++ b/tvix/nix-compat/src/derivation/parser.rs
@@ -3,7 +3,6 @@
 //!
 //! [ATerm]: http://program-transformation.org/Tools/ATermFormat.html
 
-use bstr::BString;
 use nom::bytes::complete::tag;
 use nom::character::complete::char as nomchar;
 use nom::combinator::{all_consuming, map_res};
@@ -14,7 +13,7 @@ use thiserror;
 
 use crate::derivation::parse_error::{into_nomerror, ErrorKind, NomError, NomResult};
 use crate::derivation::{write, CAHash, Derivation, Output};
-use crate::store_path::{self, StorePath, StorePathRef};
+use crate::store_path::{self, StorePath};
 use crate::{aterm, nixhash};
 
 #[derive(Debug, thiserror::Error)]
@@ -73,7 +72,7 @@ fn parse_output(i: &[u8]) -> NomResult<&[u8], (String, Output)> {
                     terminated(aterm::parse_string_field, nomchar(',')),
                     terminated(aterm::parse_string_field, nomchar(',')),
                     terminated(aterm::parse_string_field, nomchar(',')),
-                    aterm::parse_bstr_field,
+                    aterm::parse_bytes_field,
                 ))(i)
                 .map_err(into_nomerror)
             },
@@ -102,7 +101,7 @@ fn parse_output(i: &[u8]) -> NomResult<&[u8], (String, Output)> {
                             path: if output_path.is_empty() {
                                 None
                             } else {
-                                Some(string_to_store_path(i, output_path)?)
+                                Some(string_to_store_path(i, &output_path)?)
                             },
                             ca_hash: hash_with_mode,
                         },
@@ -132,12 +131,12 @@ fn parse_outputs(i: &[u8]) -> NomResult<&[u8], BTreeMap<String, Output>> {
 
     match res {
         Ok((rst, outputs_lst)) => {
-            let mut outputs: BTreeMap<String, Output> = BTreeMap::default();
+            let mut outputs = BTreeMap::default();
             for (output_name, output) in outputs_lst.into_iter() {
                 if outputs.contains_key(&output_name) {
                     return Err(nom::Err::Failure(NomError {
                         input: i,
-                        code: ErrorKind::DuplicateMapKey(output_name),
+                        code: ErrorKind::DuplicateMapKey(output_name.to_string()),
                     }));
                 }
                 outputs.insert(output_name, output);
@@ -149,11 +148,13 @@ fn parse_outputs(i: &[u8]) -> NomResult<&[u8], BTreeMap<String, Output>> {
     }
 }
 
-fn parse_input_derivations(i: &[u8]) -> NomResult<&[u8], BTreeMap<StorePath, BTreeSet<String>>> {
-    let (i, input_derivations_list) = parse_kv::<Vec<String>, _>(aterm::parse_str_list)(i)?;
+fn parse_input_derivations(
+    i: &[u8],
+) -> NomResult<&[u8], BTreeMap<StorePath<String>, BTreeSet<String>>> {
+    let (i, input_derivations_list) = parse_kv(aterm::parse_string_list)(i)?;
 
     // This is a HashMap of drv paths to a list of output names.
-    let mut input_derivations: BTreeMap<StorePath, BTreeSet<String>> = BTreeMap::new();
+    let mut input_derivations: BTreeMap<StorePath<String>, BTreeSet<_>> = BTreeMap::new();
 
     for (input_derivation, output_names) in input_derivations_list {
         let mut new_output_names = BTreeSet::new();
@@ -170,7 +171,7 @@ fn parse_input_derivations(i: &[u8]) -> NomResult<&[u8], BTreeMap<StorePath, BTr
             new_output_names.insert(output_name);
         }
 
-        let input_derivation: StorePath = string_to_store_path(i, input_derivation)?;
+        let input_derivation = string_to_store_path(i, input_derivation.as_str())?;
 
         input_derivations.insert(input_derivation, new_output_names);
     }
@@ -178,16 +179,16 @@ fn parse_input_derivations(i: &[u8]) -> NomResult<&[u8], BTreeMap<StorePath, BTr
     Ok((i, input_derivations))
 }
 
-fn parse_input_sources(i: &[u8]) -> NomResult<&[u8], BTreeSet<StorePath>> {
-    let (i, input_sources_lst) = aterm::parse_str_list(i).map_err(into_nomerror)?;
+fn parse_input_sources(i: &[u8]) -> NomResult<&[u8], BTreeSet<StorePath<String>>> {
+    let (i, input_sources_lst) = aterm::parse_string_list(i).map_err(into_nomerror)?;
 
     let mut input_sources: BTreeSet<_> = BTreeSet::new();
     for input_source in input_sources_lst.into_iter() {
-        let input_source: StorePath = string_to_store_path(i, input_source)?;
+        let input_source = string_to_store_path(i, input_source.as_str())?;
         if input_sources.contains(&input_source) {
             return Err(nom::Err::Failure(NomError {
                 input: i,
-                code: ErrorKind::DuplicateInputSource(input_source),
+                code: ErrorKind::DuplicateInputSource(input_source.to_owned()),
             }));
         } else {
             input_sources.insert(input_source);
@@ -197,24 +198,27 @@ fn parse_input_sources(i: &[u8]) -> NomResult<&[u8], BTreeSet<StorePath>> {
     Ok((i, input_sources))
 }
 
-fn string_to_store_path(
-    i: &[u8],
-    path_str: String,
-) -> Result<StorePath, nom::Err<NomError<&[u8]>>> {
-    #[cfg(debug_assertions)]
-    let path_str2 = path_str.clone();
-
-    let path: StorePath = StorePathRef::from_absolute_path(path_str.as_bytes())
-        .map_err(|e: store_path::Error| {
+fn string_to_store_path<'a, 'i, S>(
+    i: &'i [u8],
+    path_str: &'a str,
+) -> Result<StorePath<S>, nom::Err<NomError<&'i [u8]>>>
+where
+    S: std::cmp::Eq
+        + std::fmt::Display
+        + std::clone::Clone
+        + std::ops::Deref<Target = str>
+        + std::convert::From<&'a str>,
+{
+    let path =
+        StorePath::from_absolute_path(path_str.as_bytes()).map_err(|e: store_path::Error| {
             nom::Err::Failure(NomError {
                 input: i,
                 code: e.into(),
             })
-        })?
-        .to_owned();
+        })?;
 
     #[cfg(debug_assertions)]
-    assert_eq!(path_str2, path.to_absolute_path());
+    assert_eq!(path_str, path.to_absolute_path());
 
     Ok(path)
 }
@@ -240,9 +244,9 @@ pub fn parse_derivation(i: &[u8]) -> NomResult<&[u8], Derivation> {
                 // // parse builder
                 |i| terminated(aterm::parse_string_field, nomchar(','))(i).map_err(into_nomerror),
                 // // parse arguments
-                |i| terminated(aterm::parse_str_list, nomchar(','))(i).map_err(into_nomerror),
+                |i| terminated(aterm::parse_string_list, nomchar(','))(i).map_err(into_nomerror),
                 // parse environment
-                parse_kv::<BString, _>(aterm::parse_bstr_field),
+                parse_kv(aterm::parse_bytes_field),
             )),
             nomchar(')'),
         )
@@ -376,11 +380,11 @@ mod tests {
         };
         static ref EXP_AB_MAP: BTreeMap<String, BString> = {
             let mut b = BTreeMap::new();
-            b.insert("a".to_string(), b"1".as_bstr().to_owned());
-            b.insert("b".to_string(), b"2".as_bstr().to_owned());
+            b.insert("a".to_string(), b"1".into());
+            b.insert("b".to_string(), b"2".into());
             b
         };
-        static ref EXP_INPUT_DERIVATIONS_SIMPLE: BTreeMap<StorePath, BTreeSet<String>> = {
+        static ref EXP_INPUT_DERIVATIONS_SIMPLE: BTreeMap<StorePath<String>, BTreeSet<String>> = {
             let mut b = BTreeMap::new();
             b.insert(
                 StorePath::from_bytes(b"8bjm87p310sb7r2r0sg4xrynlvg86j8k-hello-2.12.1.tar.gz.drv")
@@ -427,8 +431,8 @@ mod tests {
         #[case] expected: &BTreeMap<String, BString>,
         #[case] exp_rest: &[u8],
     ) {
-        let (rest, parsed) = super::parse_kv::<BString, _>(crate::aterm::parse_bstr_field)(input)
-            .expect("must parse");
+        let (rest, parsed) =
+            super::parse_kv(crate::aterm::parse_bytes_field)(input).expect("must parse");
         assert_eq!(exp_rest, rest, "expected remainder");
         assert_eq!(*expected, parsed);
     }
@@ -437,8 +441,7 @@ mod tests {
     #[test]
     fn parse_kv_fail_dup_keys() {
         let input: &'static [u8] = b"[(\"a\",\"1\"),(\"a\",\"2\")]";
-        let e = super::parse_kv::<BString, _>(crate::aterm::parse_bstr_field)(input)
-            .expect_err("must fail");
+        let e = super::parse_kv(crate::aterm::parse_bytes_field)(input).expect_err("must fail");
 
         match e {
             nom::Err::Failure(e) => {
@@ -454,7 +457,7 @@ mod tests {
     #[case::simple(EXP_INPUT_DERIVATIONS_SIMPLE_ATERM.as_bytes(), &EXP_INPUT_DERIVATIONS_SIMPLE)]
     fn parse_input_derivations(
         #[case] input: &'static [u8],
-        #[case] expected: &BTreeMap<StorePath, BTreeSet<String>>,
+        #[case] expected: &BTreeMap<StorePath<String>, BTreeSet<String>>,
     ) {
         let (rest, parsed) = super::parse_input_derivations(input).expect("must parse");
 
diff --git a/tvix/nix-compat/src/derivation/write.rs b/tvix/nix-compat/src/derivation/write.rs
index 735b781574e1..42dadcd76064 100644
--- a/tvix/nix-compat/src/derivation/write.rs
+++ b/tvix/nix-compat/src/derivation/write.rs
@@ -6,7 +6,7 @@
 use crate::aterm::escape_bytes;
 use crate::derivation::{ca_kind_prefix, output::Output};
 use crate::nixbase32;
-use crate::store_path::{StorePath, StorePathRef, STORE_DIR_WITH_SLASH};
+use crate::store_path::{StorePath, STORE_DIR_WITH_SLASH};
 use bstr::BString;
 use data_encoding::HEXLOWER;
 
@@ -32,16 +32,12 @@ pub const QUOTE: char = '"';
 /// the context a lot.
 pub(crate) trait AtermWriteable {
     fn aterm_write(&self, writer: &mut impl Write) -> std::io::Result<()>;
-
-    fn aterm_bytes(&self) -> Vec<u8> {
-        let mut bytes = Vec::new();
-        self.aterm_write(&mut bytes)
-            .expect("unexpected write errors to Vec");
-        bytes
-    }
 }
 
-impl AtermWriteable for StorePathRef<'_> {
+impl<S> AtermWriteable for StorePath<S>
+where
+    S: std::cmp::Eq + std::ops::Deref<Target = str>,
+{
     fn aterm_write(&self, writer: &mut impl Write) -> std::io::Result<()> {
         write_char(writer, QUOTE)?;
         writer.write_all(STORE_DIR_WITH_SLASH.as_bytes())?;
@@ -53,13 +49,6 @@ impl AtermWriteable for StorePathRef<'_> {
     }
 }
 
-impl AtermWriteable for StorePath {
-    fn aterm_write(&self, writer: &mut impl Write) -> std::io::Result<()> {
-        let r: StorePathRef = self.into();
-        r.aterm_write(writer)
-    }
-}
-
 impl AtermWriteable for String {
     fn aterm_write(&self, writer: &mut impl Write) -> std::io::Result<()> {
         write_field(writer, self, true)
@@ -186,7 +175,7 @@ pub(crate) fn write_input_derivations(
 
 pub(crate) fn write_input_sources(
     writer: &mut impl Write,
-    input_sources: &BTreeSet<StorePath>,
+    input_sources: &BTreeSet<StorePath<String>>,
 ) -> Result<(), io::Error> {
     write_char(writer, BRACKET_OPEN)?;
     write_array_elements(
diff --git a/tvix/nix-compat/src/lib.rs b/tvix/nix-compat/src/lib.rs
index a71ede3eecf0..f30c557889a8 100644
--- a/tvix/nix-compat/src/lib.rs
+++ b/tvix/nix-compat/src/lib.rs
@@ -1,8 +1,12 @@
+extern crate self as nix_compat;
+
 pub(crate) mod aterm;
 pub mod derivation;
 pub mod nar;
 pub mod narinfo;
+pub mod nix_http;
 pub mod nixbase32;
+pub mod nixcpp;
 pub mod nixhash;
 pub mod path_info;
 pub mod store_path;
@@ -11,7 +15,7 @@ pub mod store_path;
 pub mod wire;
 
 #[cfg(feature = "wire")]
-mod nix_daemon;
+pub mod nix_daemon;
 #[cfg(feature = "wire")]
 pub use nix_daemon::worker_protocol;
 #[cfg(feature = "wire")]
diff --git a/tvix/nix-compat/src/nar/listing/mod.rs b/tvix/nix-compat/src/nar/listing/mod.rs
new file mode 100644
index 000000000000..5a9a3b4d3613
--- /dev/null
+++ b/tvix/nix-compat/src/nar/listing/mod.rs
@@ -0,0 +1,128 @@
+//! Parser for the Nix archive listing format, aka .ls.
+//!
+//! LS files are produced by the C++ Nix implementation via the `write-nar-listing=1` query parameter
+//! passed to a store implementation when transferring store paths.
+//!
+//! Listing files contain metadata about a file and its offset in the corresponding NAR.
+//!
+//! NOTE: LS entries do not offer any integrity field to validate the retrieved file at the provided
+//! offset. Validating the contents is the caller's responsibility.
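+//!
+//! A minimal usage sketch (the JSON below is a made-up single-file listing,
+//! not a real store path):
+//!
+//! ```rust
+//! use nix_compat::nar::listing::{Listing, ListingEntry};
+//!
+//! let json = r#"{"version":1,"root":{"type":"directory","entries":{"bin":{"type":"directory","entries":{"hello":{"type":"regular","size":10,"executable":true,"narOffset":400}}}}}}"#;
+//! let listing: Listing = serde_json::from_str(json).unwrap();
+//!
+//! // `Listing` is non-exhaustive, so match instead of destructuring.
+//! match listing {
+//!     Listing::V1 { root, .. } => {
+//!         let entry = root.locate("bin/hello").expect("path is well-formed");
+//!         assert!(matches!(entry, Some(ListingEntry::Regular { .. })));
+//!     }
+//!     _ => unreachable!(),
+//! }
+//! ```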
+
+use std::{
+    collections::HashMap,
+    path::{Component, Path},
+};
+
+use serde::Deserialize;
+
+#[cfg(test)]
+mod test;
+
+#[derive(Debug, thiserror::Error)]
+pub enum ListingError {
+    // TODO: add an enum of what component was problematic
+    // reusing `std::path::Component` is not possible as it contains a lifetime.
+    /// An unsupported path component can be:
+    /// - a Windows prefix (`C:\\`, `\\share\\`)
+    /// - a parent directory (`..`)
+    /// - a root directory (`/`)
+    #[error("unsupported path component")]
+    UnsupportedPathComponent,
+    #[error("invalid encoding for entry component")]
+    InvalidEncoding,
+}
+
+#[derive(Debug, Deserialize)]
+#[serde(tag = "type", rename_all = "lowercase")]
+pub enum ListingEntry {
+    Regular {
+        size: u64,
+        #[serde(default)]
+        executable: bool,
+        #[serde(rename = "narOffset")]
+        nar_offset: u64,
+    },
+    Directory {
+        // It's tempting to think that the key should be a `Vec<u8>`
+        // but Nix does not support that and will fail to emit a version 1 listing for any
+        // string that cannot be encoded as UTF-8.
+        entries: HashMap<String, ListingEntry>,
+    },
+    Symlink {
+        target: String,
+    },
+}
+
+impl ListingEntry {
+    /// Given a relative path without `..` component, this will locate, relative to this entry, a
+    /// deeper entry.
+    ///
+    /// If the path is invalid, a listing error [`ListingError`] will be returned.
+    /// If the entry cannot be found, `None` will be returned.
+    pub fn locate<P: AsRef<Path>>(&self, path: P) -> Result<Option<&ListingEntry>, ListingError> {
+        // We perform a simple DFS on the components of the path
+        // while rejecting dangerous components, e.g. `..` or `/`.
+        // Files and symlinks are *leaves*, i.e. we return them
+        let mut cur = self;
+        for component in path.as_ref().components() {
+            match component {
+                Component::CurDir => continue,
+                Component::RootDir | Component::Prefix(_) | Component::ParentDir => {
+                    return Err(ListingError::UnsupportedPathComponent)
+                }
+                Component::Normal(file_or_dir_name) => {
+                    if let Self::Directory { entries } = cur {
+                        // As Nix cannot encode non-UTF8 components in the listing (see comment on
+                        // the `Directory` enum variant), path components with an invalid
+                        // encoding are errors.
+                        let entry_name = file_or_dir_name
+                            .to_str()
+                            .ok_or(ListingError::InvalidEncoding)?;
+
+                        if let Some(new_entry) = entries.get(entry_name) {
+                            cur = new_entry;
+                        } else {
+                            return Ok(None);
+                        }
+                    } else {
+                        return Ok(None);
+                    }
+                }
+            }
+        }
+
+        // By construction, we found the node that corresponds to the path traversal.
+        Ok(Some(cur))
+    }
+}
+
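+/// Zero-sized marker type for the listing format version.
+/// It deserializes successfully only from the exact integer `V`, which lets
+/// the untagged [`Listing`] enum pick the variant matching the version field.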
+#[derive(Debug)]
+pub struct ListingVersion<const V: u8>;
+
+#[derive(Debug, thiserror::Error)]
+#[error("Invalid version: {0}")]
+struct ListingVersionError(u8);
+
+impl<'de, const V: u8> Deserialize<'de> for ListingVersion<V> {
+    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+    where
+        D: serde::Deserializer<'de>,
+    {
+        let value = u8::deserialize(deserializer)?;
+        if value == V {
+            Ok(ListingVersion::<V>)
+        } else {
+            Err(serde::de::Error::custom(ListingVersionError(value)))
+        }
+    }
+}
+
+#[derive(Debug, Deserialize)]
+#[serde(untagged)]
+#[non_exhaustive]
+pub enum Listing {
+    V1 {
+        root: ListingEntry,
+        version: ListingVersion<1>,
+    },
+}
diff --git a/tvix/nix-compat/src/nar/listing/test.rs b/tvix/nix-compat/src/nar/listing/test.rs
new file mode 100644
index 000000000000..5b2ac3f166fe
--- /dev/null
+++ b/tvix/nix-compat/src/nar/listing/test.rs
@@ -0,0 +1,59 @@
+use std::{collections::HashMap, path::PathBuf, str::FromStr};
+
+use crate::nar;
+
+#[test]
+fn weird_paths() {
+    let root = nar::listing::ListingEntry::Directory {
+        entries: HashMap::new(),
+    };
+
+    root.locate("../../../../etc/passwd")
+        .expect_err("Failed to reject `../` fragment in a path during traversal");
+
+    // Gated on Windows as C:\\ is parsed as `Component::Normal(_)` on Linux.
+    #[cfg(target_os = "windows")]
+    root.locate("C:\\\\Windows\\System32")
+        .expect_err("Failed to reject Windows-style prefixes");
+
+    root.locate("/etc/passwd")
+        .expect_err("Failed to reject absolute UNIX paths");
+}
+
+#[test]
+fn nixos_release() {
+    let listing_bytes = include_bytes!("../tests/nixos-release.ls");
+    let listing: nar::listing::Listing = serde_json::from_slice(listing_bytes).unwrap();
+
+    let nar::listing::Listing::V1 { root, .. } = listing;
+    assert!(matches!(root, nar::listing::ListingEntry::Directory { .. }));
+
+    let build_products = root
+        .locate(PathBuf::from_str("nix-support/hydra-build-products").unwrap())
+        .expect("Failed to locate a known file in a directory")
+        .expect("File was unexpectedly not found in the listing");
+
+    assert!(matches!(
+        build_products,
+        nar::listing::ListingEntry::Regular { .. }
+    ));
+
+    let nonexisting_file = root
+        .locate(PathBuf::from_str("nix-support/does-not-exist").unwrap())
+        .expect("Failed to locate an unknown file in a directory");
+
+    assert!(
+        nonexisting_file.is_none(),
+        "Non-existing file was unexpectedly found in the listing"
+    );
+
+    let existing_dir = root
+        .locate(PathBuf::from_str("nix-support").unwrap())
+        .expect("Failed to locate a known directory in a directory")
+        .expect("Directory was expectedly found in the listing");
+
+    assert!(matches!(
+        existing_dir,
+        nar::listing::ListingEntry::Directory { .. }
+    ));
+}
diff --git a/tvix/nix-compat/src/nar/mod.rs b/tvix/nix-compat/src/nar/mod.rs
index c678d26ffb38..d0e8ee8a412f 100644
--- a/tvix/nix-compat/src/nar/mod.rs
+++ b/tvix/nix-compat/src/nar/mod.rs
@@ -1,4 +1,5 @@
 pub(crate) mod wire;
 
+pub mod listing;
 pub mod reader;
 pub mod writer;
diff --git a/tvix/nix-compat/src/nar/reader/mod.rs b/tvix/nix-compat/src/nar/reader/mod.rs
index 9e9237ead363..eef3b10f3c28 100644
--- a/tvix/nix-compat/src/nar/reader/mod.rs
+++ b/tvix/nix-compat/src/nar/reader/mod.rs
@@ -16,7 +16,7 @@ use std::marker::PhantomData;
 // Required reading for understanding this module.
 use crate::nar::wire;
 
-#[cfg(feature = "async")]
+#[cfg(all(feature = "async", feature = "wire"))]
 pub mod r#async;
 
 mod read;
@@ -29,9 +29,11 @@ struct ArchiveReader<'a, 'r> {
     inner: &'a mut Reader<'r>,
 
     /// In debug mode, also track when we need to abandon this archive reader.
+    ///
     /// The archive reader must be abandoned when:
     ///   * An error is encountered at any point
     ///   * A file or directory reader is dropped before being read entirely.
+    ///
     /// All of these checks vanish in release mode.
     status: ArchiveReaderStatus<'a>,
 }
diff --git a/tvix/nix-compat/src/nar/tests/nixos-release.ls b/tvix/nix-compat/src/nar/tests/nixos-release.ls
new file mode 100644
index 000000000000..9dd350b7cf86
--- /dev/null
+++ b/tvix/nix-compat/src/nar/tests/nixos-release.ls
@@ -0,0 +1 @@
+{"root":{"entries":{"iso":{"entries":{"nixos-minimal-new-kernel-no-zfs-24.11pre660688.bee6b69aad74-x86_64-linux.iso":{"narOffset":440,"size":1051721728,"type":"regular"}},"type":"directory"},"nix-support":{"entries":{"hydra-build-products":{"narOffset":1051722544,"size":211,"type":"regular"},"system":{"narOffset":1051722944,"size":13,"type":"regular"}},"type":"directory"}},"type":"directory"},"version":1}
\ No newline at end of file
diff --git a/tvix/nix-compat/src/nar/wire/mod.rs b/tvix/nix-compat/src/nar/wire/mod.rs
index 9e99b530ce15..ddf021bc1fa1 100644
--- a/tvix/nix-compat/src/nar/wire/mod.rs
+++ b/tvix/nix-compat/src/nar/wire/mod.rs
@@ -39,7 +39,7 @@
 //! TOK_NAR ::= "nix-archive-1" "(" "type"
 //! TOK_SYM ::= "symlink" "target"
 //! TOK_REG ::= "regular" "contents"
-//! TOK_EXE ::= "regular" "executable" ""
+//! TOK_EXE ::= "regular" "executable" "" "contents"
 //! TOK_DIR ::= "directory"
 //! TOK_ENT ::= "entry" "(" "name"
 //! TOK_NOD ::= "node" "(" "type"
@@ -97,7 +97,7 @@ const TOK_PAD_PAR: [u8; 24] = *b"\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0)\0\0\0\0\0\0
 #[derive(Debug)]
 pub(crate) enum PadPar {}
 
-#[cfg(feature = "async")]
+#[cfg(all(feature = "async", feature = "wire"))]
 impl crate::wire::reader::Tag for PadPar {
     const PATTERN: &'static [u8] = &TOK_PAD_PAR;
 
@@ -119,6 +119,8 @@ fn tokens() {
         (&TOK_ENT, &["entry", "(", "name"]),
         (&TOK_NOD, &["node", "(", "type"]),
         (&TOK_PAR, &[")"]),
+        #[cfg(feature = "async")]
+        (&TOK_PAD_PAR, &["", ")"]),
     ];
 
     for &(tok, xs) in cases {
diff --git a/tvix/nix-compat/src/nar/writer/sync.rs b/tvix/nix-compat/src/nar/writer/sync.rs
index 6270129028fa..584b5a7192e5 100644
--- a/tvix/nix-compat/src/nar/writer/sync.rs
+++ b/tvix/nix-compat/src/nar/writer/sync.rs
@@ -35,11 +35,8 @@ use std::io::{
     Write,
 };
 
-/// Convenience type alias for types implementing [`Write`].
-pub type Writer<'a> = dyn Write + Send + 'a;
-
 /// Create a new NAR, writing the output to the specified writer.
-pub fn open<'a, 'w: 'a>(writer: &'a mut Writer<'w>) -> io::Result<Node<'a, 'w>> {
+pub fn open<W: Write>(writer: &mut W) -> io::Result<Node<W>> {
     let mut node = Node { writer };
     node.write(&wire::TOK_NAR)?;
     Ok(node)
@@ -49,11 +46,11 @@ pub fn open<'a, 'w: 'a>(writer: &'a mut Writer<'w>) -> io::Result<Node<'a, 'w>>
 ///
 /// A NAR can be thought of as a tree of nodes represented by this type. Each
 /// node can be a file, a symlink or a directory containing other nodes.
-pub struct Node<'a, 'w: 'a> {
-    writer: &'a mut Writer<'w>,
+pub struct Node<'a, W: Write> {
+    writer: &'a mut W,
 }
 
-impl<'a, 'w> Node<'a, 'w> {
+impl<'a, W: Write> Node<'a, W> {
     fn write(&mut self, data: &[u8]) -> io::Result<()> {
         self.writer.write_all(data)
     }
@@ -128,7 +125,7 @@ impl<'a, 'w> Node<'a, 'w> {
     ///
     /// It is the caller's responsibility to invoke [`Directory::close`],
     /// or invalid archives will be produced silently.
-    pub fn directory(mut self) -> io::Result<Directory<'a, 'w>> {
+    pub fn directory(mut self) -> io::Result<Directory<'a, W>> {
         self.write(&wire::TOK_DIR)?;
         Ok(Directory::new(self))
     }
@@ -145,13 +142,13 @@ fn into_name(_name: &[u8]) -> Name {
 }
 
 /// Content of a NAR node that represents a directory.
-pub struct Directory<'a, 'w> {
-    node: Node<'a, 'w>,
+pub struct Directory<'a, W: Write> {
+    node: Node<'a, W>,
     prev_name: Option<Name>,
 }
 
-impl<'a, 'w> Directory<'a, 'w> {
-    fn new(node: Node<'a, 'w>) -> Self {
+impl<'a, W: Write> Directory<'a, W> {
+    fn new(node: Node<'a, W>) -> Self {
         Self {
             node,
             prev_name: None,
@@ -166,7 +163,7 @@ impl<'a, 'w> Directory<'a, 'w> {
     /// It is the caller's responsibility to ensure that directory entries are
     /// written in order of ascending name. If this is not ensured, this method
     /// may panic or silently produce invalid archives.
-    pub fn entry(&mut self, name: &[u8]) -> io::Result<Node<'_, 'w>> {
+    pub fn entry(&mut self, name: &[u8]) -> io::Result<Node<'_, W>> {
         debug_assert!(
             name.len() <= wire::MAX_NAME_LEN,
             "name.len() > {}",
diff --git a/tvix/nix-compat/src/narinfo/mod.rs b/tvix/nix-compat/src/narinfo/mod.rs
index b1c10bceb200..21aecf80b5a2 100644
--- a/tvix/nix-compat/src/narinfo/mod.rs
+++ b/tvix/nix-compat/src/narinfo/mod.rs
@@ -27,13 +27,15 @@ use std::{
 use crate::{nixbase32, nixhash::CAHash, store_path::StorePathRef};
 
 mod fingerprint;
-mod public_keys;
 mod signature;
+mod signing_keys;
+mod verifying_keys;
 
 pub use fingerprint::fingerprint;
-
-pub use public_keys::{Error as PubKeyError, PubKey};
-pub use signature::{Error as SignatureError, Signature};
+pub use signature::{Error as SignatureError, Signature, SignatureRef};
+pub use signing_keys::parse_keypair;
+pub use signing_keys::{Error as SigningKeyError, SigningKey};
+pub use verifying_keys::{Error as VerifyingKeyError, VerifyingKey};
 
 #[derive(Debug)]
 pub struct NarInfo<'a> {
@@ -49,7 +51,7 @@ pub struct NarInfo<'a> {
     pub references: Vec<StorePathRef<'a>>,
     // authenticity
     /// Ed25519 signature over the path fingerprint
-    pub signatures: Vec<Signature<'a>>,
+    pub signatures: Vec<SignatureRef<'a>>,
     /// Content address (for content-defined paths)
     pub ca: Option<CAHash>,
     // derivation metadata
@@ -244,7 +246,7 @@ impl<'a> NarInfo<'a> {
                     };
                 }
                 "Sig" => {
-                    let val = Signature::parse(val)
+                    let val = SignatureRef::parse(val)
                         .map_err(|e| Error::UnableToParseSignature(signatures.len(), e))?;
 
                     signatures.push(val);
@@ -297,6 +299,21 @@ impl<'a> NarInfo<'a> {
             self.references.iter(),
         )
     }
+
+    /// Adds a signature, using the passed signer to sign.
+    /// This is generic over signature algorithm implementations / providers,
+    /// so users can bring their own signers.
+    pub fn add_signature<S>(&mut self, signer: &'a SigningKey<S>)
+    where
+        S: ed25519::signature::Signer<ed25519::Signature>,
+    {
+        // calculate the fingerprint to sign
+        let fp = self.fingerprint();
+
+        let sig = signer.sign(fp.as_bytes());
+
+        self.signatures.push(sig);
+    }
 }
 
 impl Display for NarInfo<'_> {
@@ -392,6 +409,12 @@ pub enum Error {
 }
 
 #[cfg(test)]
+const DUMMY_KEYPAIR: &str = "cache.example.com-1:cCta2MEsRNuYCgWYyeRXLyfoFpKhQJKn8gLMeXWAb7vIpRKKo/3JoxJ24OYa3DxT2JVV38KjK/1ywHWuMe2JEw==";
+#[cfg(test)]
+const DUMMY_VERIFYING_KEY: &str =
+    "cache.example.com-1:yKUSiqP9yaMSduDmGtw8U9iVVd/Coyv9csB1rjHtiRM=";
+
+#[cfg(test)]
 mod test {
     use hex_literal::hex;
     use lazy_static::lazy_static;
@@ -524,4 +547,46 @@ Sig: cache.nixos.org-1:HhaiY36Uk3XV1JGe9d9xHnzAapqJXprU1YZZzSzxE97jCuO5RR7vlG2kF
             parsed.nar_hash,
         );
     }
+
+    /// Adds a signature to a NARInfo, using key material parsed from DUMMY_KEYPAIR.
+    /// It then ensures signature verification with the parsed
+    /// DUMMY_VERIFYING_KEY succeeds.
+    #[test]
+    fn sign() {
+        let mut narinfo = NarInfo::parse(
+            r#"StorePath: /nix/store/0vpqfxbkx0ffrnhbws6g9qwhmliksz7f-perl-HTTP-Cookies-6.01
+URL: nar/0i5biw0g01514llhfswxy6xfav8lxxdq1xg6ik7hgsqbpw0f06yi.nar.xz
+Compression: xz
+FileHash: sha256:0i5biw0g01514llhfswxy6xfav8lxxdq1xg6ik7hgsqbpw0f06yi
+FileSize: 7120
+NarHash: sha256:0h1bm4sj1cnfkxgyhvgi8df1qavnnv94sd0v09wcrm971602shfg
+NarSize: 22552
+References: 
+CA: fixed:r:sha1:1ak1ymbmsfx7z8kh09jzkr3a4dvkrfjw
+"#,
+        )
+        .expect("should parse");
+
+        let fp = narinfo.fingerprint();
+
+        // load our keypair from the fixtures
+        let (signing_key, _verifying_key) =
+            super::parse_keypair(super::DUMMY_KEYPAIR).expect("must succeed");
+
+        // add signature
+        narinfo.add_signature(&signing_key);
+
+        // ensure the signature is added
+        let new_sig = narinfo.signatures.last().unwrap();
+        assert_eq!(signing_key.name(), *new_sig.name());
+
+        // verify the new signature against the verifying key
+        let verifying_key = super::VerifyingKey::parse(super::DUMMY_VERIFYING_KEY)
+            .expect("parsing dummy verifying key");
+
+        assert!(
+            verifying_key.verify(&fp, new_sig),
+            "expect signature to be valid"
+        );
+    }
 }
diff --git a/tvix/nix-compat/src/narinfo/signature.rs b/tvix/nix-compat/src/narinfo/signature.rs
index fd197e771d98..33c49128c2d5 100644
--- a/tvix/nix-compat/src/narinfo/signature.rs
+++ b/tvix/nix-compat/src/narinfo/signature.rs
@@ -1,21 +1,44 @@
-use std::fmt::{self, Display};
+use std::{
+    fmt::{self, Display},
+    ops::Deref,
+};
 
 use data_encoding::BASE64;
-use ed25519_dalek::SIGNATURE_LENGTH;
 use serde::{Deserialize, Serialize};
 
+const SIGNATURE_LENGTH: usize = std::mem::size_of::<ed25519::SignatureBytes>();
+
 #[derive(Clone, Debug, Eq, PartialEq)]
-pub struct Signature<'a> {
-    name: &'a str,
-    bytes: [u8; SIGNATURE_LENGTH],
+pub struct Signature<S> {
+    name: S,
+    bytes: ed25519::SignatureBytes,
 }
 
-impl<'a> Signature<'a> {
-    pub fn new(name: &'a str, bytes: [u8; SIGNATURE_LENGTH]) -> Self {
+/// Type alias for a [Signature] using `&str` as its `name` field.
+pub type SignatureRef<'a> = Signature<&'a str>;
+
+/// Represents the signatures that Nix emits.
+/// It consists of a name (an identifier for a public key), and an ed25519
+/// signature (64 bytes).
+/// It is generic over the string type that's used for the name, and there's
+/// [SignatureRef] as a type alias for one containing &str.
+impl<S> Signature<S>
+where
+    S: Deref<Target = str>,
+{
+    /// Constructs a new [Signature] from a name and the raw signature bytes.
+    pub fn new(name: S, bytes: ed25519::SignatureBytes) -> Self {
         Self { name, bytes }
     }
 
-    pub fn parse(input: &'a str) -> Result<Self, Error> {
+    /// Parses a [Signature] from a string containing the name, a colon, and the
+    /// base64-encoded 64 signature bytes (plus padding).
+    /// These strings are commonly seen in the `Sig:` field of a NARInfo
+    /// file.
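+    ///
+    /// A short sketch; the signature string below is taken from this crate's
+    /// own test fixtures:
+    ///
+    /// ```rust
+    /// use nix_compat::narinfo::SignatureRef;
+    ///
+    /// let sig = SignatureRef::parse("cache.nixos.org-1:TsTTb3WGTZKphvYdBHXwo6weVILmTytUjLB+vcX89fOjjRicCHmKA4RCPMVLkj6TMJ4GMX3HPVWRdD1hkeKZBQ==")
+    ///     .expect("must parse");
+    /// assert_eq!(*sig.name(), "cache.nixos.org-1");
+    /// ```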
+    pub fn parse<'a>(input: &'a str) -> Result<Self, Error>
+    where
+        S: From<&'a str>,
+    {
         let (name, bytes64) = input.split_once(':').ok_or(Error::MissingSeparator)?;
 
         if name.is_empty()
@@ -39,14 +62,19 @@ impl<'a> Signature<'a> {
             Err(_) => return Err(Error::DecodeError(input.to_string())),
         }
 
-        Ok(Signature { name, bytes })
+        Ok(Self {
+            name: name.into(),
+            bytes,
+        })
     }
 
-    pub fn name(&self) -> &'a str {
-        self.name
+    /// Returns the name field of the signature.
+    pub fn name(&self) -> &S {
+        &self.name
     }
 
-    pub fn bytes(&self) -> &[u8; SIGNATURE_LENGTH] {
+    /// Returns the 64 signature bytes.
+    pub fn bytes(&self) -> &ed25519::SignatureBytes {
         &self.bytes
     }
 
@@ -56,9 +84,21 @@ impl<'a> Signature<'a> {
 
         verifying_key.verify_strict(fingerprint, &signature).is_ok()
     }
+
+    /// Constructs a [SignatureRef] from this signature.
+    pub fn as_ref(&self) -> SignatureRef<'_> {
+        SignatureRef {
+            name: self.name.deref(),
+            bytes: self.bytes,
+        }
+    }
 }
 
-impl<'de: 'a, 'a> Deserialize<'de> for Signature<'a> {
+impl<'a, 'de, S> Deserialize<'de> for Signature<S>
+where
+    S: Deref<Target = str> + From<&'a str>,
+    'de: 'a,
+{
     fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
     where
         D: serde::Deserializer<'de>,
@@ -70,10 +110,13 @@ impl<'de: 'a, 'a> Deserialize<'de> for Signature<'a> {
     }
 }
 
-impl<'a> Serialize for Signature<'a> {
-    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+impl<S: Display> Serialize for Signature<S>
+where
+    S: Deref<Target = str>,
+{
+    fn serialize<SR>(&self, serializer: SR) -> Result<SR::Ok, SR::Error>
     where
-        S: serde::Serializer,
+        SR: serde::Serializer,
     {
         let string: String = self.to_string();
 
@@ -81,6 +124,15 @@ impl<'a> Serialize for Signature<'a> {
     }
 }
 
+impl<S> Display for Signature<S>
+where
+    S: Display,
+{
+    fn fmt(&self, w: &mut fmt::Formatter) -> fmt::Result {
+        write!(w, "{}:{}", self.name, BASE64.encode(&self.bytes))
+    }
+}
+
 #[derive(Debug, thiserror::Error)]
 pub enum Error {
     #[error("Invalid name: {0}")]
@@ -93,12 +145,6 @@ pub enum Error {
     DecodeError(String),
 }
 
-impl Display for Signature<'_> {
-    fn fmt(&self, w: &mut fmt::Formatter) -> fmt::Result {
-        write!(w, "{}:{}", self.name, BASE64.encode(&self.bytes))
-    }
-}
-
 #[cfg(test)]
 mod test {
     use data_encoding::BASE64;
@@ -143,7 +189,7 @@ mod test {
         #[case] fp: &str,
         #[case] expect_valid: bool,
     ) {
-        let sig = Signature::parse(sig_str).expect("must parse");
+        let sig = Signature::<&str>::parse(sig_str).expect("must parse");
         assert_eq!(expect_valid, sig.verify(fp.as_bytes(), verifying_key));
     }
 
@@ -158,7 +204,7 @@ mod test {
         "u01BybwQhyI5H1bW1EIWXssMDhDDIvXOG5uh8Qzgdyjz6U1qg6DHhMAvXZOUStIj6X5t4/ufFgR8i3fjf0bMAw=="
     )]
     fn parse_fail(#[case] input: &'static str) {
-        Signature::parse(input).expect_err("must fail");
+        Signature::<&str>::parse(input).expect_err("must fail");
     }
 
     #[test]
@@ -177,8 +223,29 @@ mod test {
         let serialized = serde_json::to_string(&signature_actual).expect("must serialize");
         assert_eq!(signature_str_json, &serialized);
 
-        let deserialized: Signature<'_> =
+        let deserialized: Signature<&str> =
             serde_json::from_str(signature_str_json).expect("must deserialize");
         assert_eq!(&signature_actual, &deserialized);
     }
+
+    /// Construct a [Signature], using different String types for the name field.
+    #[test]
+    fn signature_owned() {
+        let signature1 = Signature::<String>::parse("cache.nixos.org-1:TsTTb3WGTZKphvYdBHXwo6weVILmTytUjLB+vcX89fOjjRicCHmKA4RCPMVLkj6TMJ4GMX3HPVWRdD1hkeKZBQ==").expect("must parse");
+        let signature2 = Signature::<smol_str::SmolStr>::parse("cache.nixos.org-1:TsTTb3WGTZKphvYdBHXwo6weVILmTytUjLB+vcX89fOjjRicCHmKA4RCPMVLkj6TMJ4GMX3HPVWRdD1hkeKZBQ==").expect("must parse");
+        let signature3 = Signature::<&str>::parse("cache.nixos.org-1:TsTTb3WGTZKphvYdBHXwo6weVILmTytUjLB+vcX89fOjjRicCHmKA4RCPMVLkj6TMJ4GMX3HPVWRdD1hkeKZBQ==").expect("must parse");
+
+        assert!(
+            signature1.verify(FINGERPRINT.as_bytes(), &PUB_CACHE_NIXOS_ORG_1),
+            "must verify"
+        );
+        assert!(
+            signature2.verify(FINGERPRINT.as_bytes(), &PUB_CACHE_NIXOS_ORG_1),
+            "must verify"
+        );
+        assert!(
+            signature3.verify(FINGERPRINT.as_bytes(), &PUB_CACHE_NIXOS_ORG_1),
+            "must verify"
+        );
+    }
 }
diff --git a/tvix/nix-compat/src/narinfo/signing_keys.rs b/tvix/nix-compat/src/narinfo/signing_keys.rs
new file mode 100644
index 000000000000..cf513b7ba475
--- /dev/null
+++ b/tvix/nix-compat/src/narinfo/signing_keys.rs
@@ -0,0 +1,119 @@
+//! This module provides tooling to parse private key (pairs) produced by
+//! Nix's `nix-store --generate-binary-cache-key name path.secret path.pub`
+//! command.
+//! It produces `ed25519_dalek` keys, but the `NarInfo::add_signature` function
+//! is generic, allowing other signers.
+
+use data_encoding::BASE64;
+use ed25519_dalek::{PUBLIC_KEY_LENGTH, SECRET_KEY_LENGTH};
+
+use super::{SignatureRef, VerifyingKey};
+
+pub struct SigningKey<S> {
+    name: String,
+    signing_key: S,
+}
+
+impl<S> SigningKey<S>
+where
+    S: ed25519::signature::Signer<ed25519::Signature>,
+{
+    /// Constructs a signing key, using a name and a signing key.
+    pub fn new(name: String, signing_key: S) -> Self {
+        Self { name, signing_key }
+    }
+
+    /// Signs a fingerprint using the internal signing key, returns the [SignatureRef]
+    pub(crate) fn sign<'a>(&'a self, fp: &[u8]) -> SignatureRef<'a> {
+        SignatureRef::new(&self.name, self.signing_key.sign(fp).to_bytes())
+    }
+
+    pub fn name(&self) -> &str {
+        &self.name
+    }
+}
+
+/// Parses a SigningKey / VerifyingKey from a string in the format that Nix uses.
+pub fn parse_keypair(
+    input: &str,
+) -> Result<(SigningKey<ed25519_dalek::SigningKey>, VerifyingKey), Error> {
+    let (name, bytes64) = input.split_once(':').ok_or(Error::MissingSeparator)?;
+
+    if name.is_empty()
+        || !name
+            .chars()
+            .all(|c| char::is_alphanumeric(c) || c == '-' || c == '.')
+    {
+        return Err(Error::InvalidName(name.to_string()));
+    }
+
+    const DECODED_BYTES_LEN: usize = SECRET_KEY_LENGTH + PUBLIC_KEY_LENGTH;
+    if bytes64.len() != BASE64.encode_len(DECODED_BYTES_LEN) {
+        return Err(Error::InvalidSigningKeyLen(bytes64.len()));
+    }
+
+    let mut buf = [0; DECODED_BYTES_LEN + 2]; // 64 bytes + 2 bytes padding
+    let mut bytes = [0; DECODED_BYTES_LEN];
+    match BASE64.decode_mut(bytes64.as_bytes(), &mut buf) {
+        Ok(len) if len == DECODED_BYTES_LEN => {
+            bytes.copy_from_slice(&buf[..DECODED_BYTES_LEN]);
+        }
+        Ok(_) => unreachable!(),
+        // keeping DecodePartial gets annoying lifetime-wise
+        Err(_) => return Err(Error::DecodeError(input.to_string())),
+    }
+
+    let bytes_signing_key: [u8; SECRET_KEY_LENGTH] = {
+        let mut b = [0u8; SECRET_KEY_LENGTH];
+        b.copy_from_slice(&bytes[0..SECRET_KEY_LENGTH]);
+        b
+    };
+    let bytes_verifying_key: [u8; PUBLIC_KEY_LENGTH] = {
+        let mut b = [0u8; PUBLIC_KEY_LENGTH];
+        b.copy_from_slice(&bytes[SECRET_KEY_LENGTH..]);
+        b
+    };
+
+    let signing_key = SigningKey::new(
+        name.to_string(),
+        ed25519_dalek::SigningKey::from_bytes(&bytes_signing_key),
+    );
+
+    let verifying_key = VerifyingKey::new(
+        name.to_string(),
+        ed25519_dalek::VerifyingKey::from_bytes(&bytes_verifying_key)
+            .map_err(Error::InvalidVerifyingKey)?,
+    );
+
+    Ok((signing_key, verifying_key))
+}
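// Usage sketch (illustrative; `sign` is pub(crate) and is normally invoked
// through `NarInfo::add_signature`, and the key string is abbreviated here):
//
//     let (signing_key, verifying_key) =
//         parse_keypair("cache.example.com-1:<base64 of secret||public>")?;
//     let sig = signing_key.sign(fingerprint.as_bytes());
//     assert!(verifying_key.verify(fingerprint, &sig));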
+
+#[derive(Debug, thiserror::Error)]
+pub enum Error {
+    #[error("Invalid name: {0}")]
+    InvalidName(String),
+    #[error("Missing separator")]
+    MissingSeparator,
+    #[error("Invalid signing key len: {0}")]
+    InvalidSigningKeyLen(usize),
+    #[error("Unable to base64-decode signing key: {0}")]
+    DecodeError(String),
+    #[error("VerifyingKey error: {0}")]
+    InvalidVerifyingKey(ed25519_dalek::SignatureError),
+}
+
+#[cfg(test)]
+mod test {
+    use crate::narinfo::DUMMY_KEYPAIR;
+    #[test]
+    fn parse() {
+        let (_signing_key, _verifying_key) =
+            super::parse_keypair(DUMMY_KEYPAIR).expect("must succeed");
+    }
+
+    #[test]
+    fn parse_fail() {
+        assert!(super::parse_keypair("cache.example.com-1:cCta2MEsRNuYCgWYyeRXLyfoFpKhQJKn8gLMeXWAb7vIpRKKo/3JoxJ24OYa3DxT2JVV38KjK/1ywHWuMe2JE").is_err());
+        assert!(super::parse_keypair("cache.example.com-1cCta2MEsRNuYCgWYyeRXLyfoFpKhQJKn8gLMeXWAb7vIpRKKo/3JoxJ24OYa3DxT2JVV38KjK/1ywHWuMe2JE").is_err());
+    }
+}
diff --git a/tvix/nix-compat/src/narinfo/public_keys.rs b/tvix/nix-compat/src/narinfo/verifying_keys.rs
index 27dd90e096db..67ef2e3a459c 100644
--- a/tvix/nix-compat/src/narinfo/public_keys.rs
+++ b/tvix/nix-compat/src/narinfo/verifying_keys.rs
@@ -4,21 +4,21 @@
 use std::fmt::Display;
 
 use data_encoding::BASE64;
-use ed25519_dalek::{VerifyingKey, PUBLIC_KEY_LENGTH};
+use ed25519_dalek::PUBLIC_KEY_LENGTH;
 
-use super::Signature;
+use super::SignatureRef;
 
 /// This represents an ed25519 public key and "name".
 /// These are normally passed in the `trusted-public-keys` Nix config option,
 /// and consist of a name and base64-encoded ed25519 pubkey, separated by a `:`.
-#[derive(Debug)]
-pub struct PubKey {
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub struct VerifyingKey {
     name: String,
-    verifying_key: VerifyingKey,
+    verifying_key: ed25519_dalek::VerifyingKey,
 }
 
-impl PubKey {
-    pub fn new(name: String, verifying_key: VerifyingKey) -> Self {
+impl VerifyingKey {
+    pub fn new(name: String, verifying_key: ed25519_dalek::VerifyingKey) -> Self {
         Self {
             name,
             verifying_key,
@@ -37,7 +37,7 @@ impl PubKey {
         }
 
         if bytes64.len() != BASE64.encode_len(PUBLIC_KEY_LENGTH) {
-            return Err(Error::InvalidPubKeyLen(bytes64.len()));
+            return Err(Error::InvalidVerifyingKeyLen(bytes64.len()));
         }
 
         let mut buf = [0; PUBLIC_KEY_LENGTH + 1];
@@ -51,7 +51,8 @@ impl PubKey {
             Err(_) => return Err(Error::DecodeError(input.to_string())),
         }
 
-        let verifying_key = VerifyingKey::from_bytes(&bytes).map_err(Error::InvalidVerifyingKey)?;
+        let verifying_key =
+            ed25519_dalek::VerifyingKey::from_bytes(&bytes).map_err(Error::InvalidVerifyingKey)?;
 
         Ok(Self {
             name: name.to_string(),
@@ -68,8 +69,8 @@ impl PubKey {
     /// which means the name in the signature has to match,
     /// and the signature bytes themselves need to be a valid signature made by
     /// the signing key identified by [Self::verifying_key].
-    pub fn verify(&self, fingerprint: &str, signature: &Signature) -> bool {
-        if self.name() != signature.name() {
+    pub fn verify(&self, fingerprint: &str, signature: &SignatureRef<'_>) -> bool {
+        if self.name() != *signature.name() {
             return false;
         }
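// Verification sketch (key and signature strings abbreviated): both the name
// and the signature bytes have to check out for `verify` to return true:
//
//     let key = VerifyingKey::parse("cache.nixos.org-1:<base64 pubkey>")?;
//     let sig = SignatureRef::parse("cache.nixos.org-1:<base64 signature>")?;
//     let trusted = key.verify(fingerprint, &sig);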
 
@@ -84,14 +85,14 @@ pub enum Error {
     #[error("Missing separator")]
     MissingSeparator,
     #[error("Invalid pubkey len: {0}")]
-    InvalidPubKeyLen(usize),
+    InvalidVerifyingKeyLen(usize),
     #[error("VerifyingKey error: {0}")]
     InvalidVerifyingKey(ed25519_dalek::SignatureError),
     #[error("Unable to base64-decode pubkey: {0}")]
     DecodeError(String),
 }
 
-impl Display for PubKey {
+impl Display for VerifyingKey {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         write!(
             f,
@@ -108,9 +109,9 @@ mod test {
     use ed25519_dalek::PUBLIC_KEY_LENGTH;
     use rstest::rstest;
 
-    use crate::narinfo::Signature;
+    use crate::narinfo::SignatureRef;
 
-    use super::PubKey;
+    use super::VerifyingKey;
     const FINGERPRINT: &str = "1;/nix/store/syd87l2rxw8cbsxmxl853h0r6pdwhwjr-curl-7.82.0-bin;sha256:1b4sb93wp679q4zx9k1ignby1yna3z7c4c2ri3wphylbc2dwsys0;196040;/nix/store/0jqd0rlxzra1rs38rdxl43yh6rxchgc6-curl-7.82.0,/nix/store/6w8g7njm4mck5dmjxws0z1xnrxvl81xa-glibc-2.34-115,/nix/store/j5jxw3iy7bbz4a57fh9g2xm2gxmyal8h-zlib-1.2.12,/nix/store/yxvjs9drzsphm9pcf42a4byzj1kb9m7k-openssl-1.1.1n";
 
     #[rstest]
@@ -122,7 +123,7 @@ mod test {
         #[case] exp_name: &'static str,
         #[case] exp_verifying_key_bytes: &[u8; PUBLIC_KEY_LENGTH],
     ) {
-        let pubkey = PubKey::parse(input).expect("must parse");
+        let pubkey = VerifyingKey::parse(input).expect("must parse");
         assert_eq!(exp_name, pubkey.name());
         assert_eq!(exp_verifying_key_bytes, pubkey.verifying_key.as_bytes());
     }
@@ -132,7 +133,7 @@ mod test {
     #[case::missing_padding("cache.nixos.org-1:6NCHdD59X431o0gWypbMrAURkbJ16ZPMQFGspcDShjY")]
     #[case::wrong_length("cache.nixos.org-1:6NCHdD59X431o0gWypbMrAURkbJ16ZPMQFGspcDS")]
     fn parse_fail(#[case] input: &'static str) {
-        PubKey::parse(input).expect_err("must fail");
+        VerifyingKey::parse(input).expect_err("must fail");
     }
 
     #[rstest]
@@ -144,8 +145,8 @@ mod test {
         #[case] signature_str: &'static str,
         #[case] expected: bool,
     ) {
-        let pubkey = PubKey::parse(pubkey_str).expect("must parse");
-        let signature = Signature::parse(signature_str).expect("must parse");
+        let pubkey = VerifyingKey::parse(pubkey_str).expect("must parse");
+        let signature = SignatureRef::parse(signature_str).expect("must parse");
 
         assert_eq!(expected, pubkey.verify(fingerprint, &signature));
     }
diff --git a/tvix/nix-compat/src/nix_daemon/de/bytes.rs b/tvix/nix-compat/src/nix_daemon/de/bytes.rs
new file mode 100644
index 000000000000..7daced54eef7
--- /dev/null
+++ b/tvix/nix-compat/src/nix_daemon/de/bytes.rs
@@ -0,0 +1,70 @@
+use bytes::Bytes;
+
+use super::{Error, NixDeserialize, NixRead};
+
+impl NixDeserialize for Bytes {
+    async fn try_deserialize<R>(reader: &mut R) -> Result<Option<Self>, R::Error>
+    where
+        R: ?Sized + NixRead + Send,
+    {
+        reader.try_read_bytes().await
+    }
+}
+
+impl NixDeserialize for String {
+    async fn try_deserialize<R>(reader: &mut R) -> Result<Option<Self>, R::Error>
+    where
+        R: ?Sized + NixRead + Send,
+    {
+        if let Some(buf) = reader.try_read_bytes().await? {
+            String::from_utf8(buf.to_vec())
+                .map_err(R::Error::invalid_data)
+                .map(Some)
+        } else {
+            Ok(None)
+        }
+    }
+}
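// Note: strings on the wire are plain byte buffers that must additionally be
// valid UTF-8; invalid sequences surface via `Error::invalid_data` (an
// `io::ErrorKind::InvalidData` for `NixReader`), as `test_read_string_invalid`
// below exercises.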
+
+#[cfg(test)]
+mod test {
+    use std::io;
+
+    use hex_literal::hex;
+    use rstest::rstest;
+    use tokio_test::io::Builder;
+
+    use crate::nix_daemon::de::{NixRead, NixReader};
+
+    #[rstest]
+    #[case::empty("", &hex!("0000 0000 0000 0000"))]
+    #[case::one(")", &hex!("0100 0000 0000 0000 2900 0000 0000 0000"))]
+    #[case::two("it", &hex!("0200 0000 0000 0000 6974 0000 0000 0000"))]
+    #[case::three("tea", &hex!("0300 0000 0000 0000 7465 6100 0000 0000"))]
+    #[case::four("were", &hex!("0400 0000 0000 0000 7765 7265 0000 0000"))]
+    #[case::five("where", &hex!("0500 0000 0000 0000 7768 6572 6500 0000"))]
+    #[case::six("unwrap", &hex!("0600 0000 0000 0000 756E 7772 6170 0000"))]
+    #[case::seven("where's", &hex!("0700 0000 0000 0000 7768 6572 6527 7300"))]
+    #[case::aligned("read_tea", &hex!("0800 0000 0000 0000 7265 6164 5F74 6561"))]
+    #[case::more_bytes("read_tess", &hex!("0900 0000 0000 0000 7265 6164 5F74 6573 7300 0000 0000 0000"))]
+    #[case::utf8("The quick brown ๐ŸฆŠ jumps over 13 lazy ๐Ÿถ.", &hex!("2D00 0000 0000 0000  5468 6520 7175 6963  6b20 6272 6f77 6e20  f09f a68a 206a 756d  7073 206f 7665 7220  3133 206c 617a 7920  f09f 90b6 2e00 0000"))]
+    #[tokio::test]
+    async fn test_read_string(#[case] expected: &str, #[case] data: &[u8]) {
+        let mock = Builder::new().read(data).build();
+        let mut reader = NixReader::new(mock);
+        let actual: String = reader.read_value().await.unwrap();
+        assert_eq!(actual, expected);
+    }
+
+    #[tokio::test]
+    async fn test_read_string_invalid() {
+        let mock = Builder::new()
+            .read(&hex!("0300 0000 0000 0000 EDA0 8000 0000 0000"))
+            .build();
+        let mut reader = NixReader::new(mock);
+        assert_eq!(
+            io::ErrorKind::InvalidData,
+            reader.read_value::<String>().await.unwrap_err().kind()
+        );
+    }
+}
diff --git a/tvix/nix-compat/src/nix_daemon/de/collections.rs b/tvix/nix-compat/src/nix_daemon/de/collections.rs
new file mode 100644
index 000000000000..cf79f584506a
--- /dev/null
+++ b/tvix/nix-compat/src/nix_daemon/de/collections.rs
@@ -0,0 +1,105 @@
+use std::{collections::BTreeMap, future::Future};
+
+use super::{NixDeserialize, NixRead};
+
+#[allow(clippy::manual_async_fn)]
+impl<T> NixDeserialize for Vec<T>
+where
+    T: NixDeserialize + Send,
+{
+    fn try_deserialize<R>(
+        reader: &mut R,
+    ) -> impl Future<Output = Result<Option<Self>, R::Error>> + Send + '_
+    where
+        R: ?Sized + NixRead + Send,
+    {
+        async move {
+            if let Some(len) = reader.try_read_value::<usize>().await? {
+                let mut ret = Vec::with_capacity(len);
+                for _ in 0..len {
+                    ret.push(reader.read_value().await?);
+                }
+                Ok(Some(ret))
+            } else {
+                Ok(None)
+            }
+        }
+    }
+}
+
+#[allow(clippy::manual_async_fn)]
+impl<K, V> NixDeserialize for BTreeMap<K, V>
+where
+    K: NixDeserialize + Ord + Send,
+    V: NixDeserialize + Send,
+{
+    fn try_deserialize<R>(
+        reader: &mut R,
+    ) -> impl Future<Output = Result<Option<Self>, R::Error>> + Send + '_
+    where
+        R: ?Sized + NixRead + Send,
+    {
+        async move {
+            if let Some(len) = reader.try_read_value::<usize>().await? {
+                let mut ret = BTreeMap::new();
+                for _ in 0..len {
+                    let key = reader.read_value().await?;
+                    let value = reader.read_value().await?;
+                    ret.insert(key, value);
+                }
+                Ok(Some(ret))
+            } else {
+                Ok(None)
+            }
+        }
+    }
+}
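// Wire format note: collections are encoded as a u64 element count followed by
// each element in sequence, e.g. `vec![0x29usize]` is
//
//     0100 0000 0000 0000  2900 0000 0000 0000
//
// (count = 1, then the single element), as the cases below demonstrate.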
+
+#[cfg(test)]
+mod test {
+    use std::collections::BTreeMap;
+    use std::fmt;
+
+    use hex_literal::hex;
+    use rstest::rstest;
+    use tokio_test::io::Builder;
+
+    use crate::nix_daemon::de::{NixDeserialize, NixRead, NixReader};
+
+    #[rstest]
+    #[case::empty(vec![], &hex!("0000 0000 0000 0000"))]
+    #[case::one(vec![0x29], &hex!("0100 0000 0000 0000 2900 0000 0000 0000"))]
+    #[case::two(vec![0x7469, 10], &hex!("0200 0000 0000 0000 6974 0000 0000 0000 0A00 0000 0000 0000"))]
+    #[tokio::test]
+    async fn test_read_small_vec(#[case] expected: Vec<usize>, #[case] data: &[u8]) {
+        let mock = Builder::new().read(data).build();
+        let mut reader = NixReader::new(mock);
+        let actual: Vec<usize> = reader.read_value().await.unwrap();
+        assert_eq!(actual, expected);
+    }
+
+    fn empty_map() -> BTreeMap<usize, u64> {
+        BTreeMap::new()
+    }
+    macro_rules! map {
+        ($($key:expr => $value:expr),*) => {{
+            let mut ret = BTreeMap::new();
+            $(ret.insert($key, $value);)*
+            ret
+        }};
+    }
+
+    #[rstest]
+    #[case::empty(empty_map(), &hex!("0000 0000 0000 0000"))]
+    #[case::one(map![0x7469usize => 10u64], &hex!("0100 0000 0000 0000 6974 0000 0000 0000 0A00 0000 0000 0000"))]
+    #[tokio::test]
+    async fn test_read_small_btree_map<E>(#[case] expected: E, #[case] data: &[u8])
+    where
+        E: NixDeserialize + PartialEq + fmt::Debug,
+    {
+        let mock = Builder::new().read(data).build();
+        let mut reader = NixReader::new(mock);
+        let actual: E = reader.read_value().await.unwrap();
+        assert_eq!(actual, expected);
+    }
+}
diff --git a/tvix/nix-compat/src/nix_daemon/de/int.rs b/tvix/nix-compat/src/nix_daemon/de/int.rs
new file mode 100644
index 000000000000..eecf641cfe99
--- /dev/null
+++ b/tvix/nix-compat/src/nix_daemon/de/int.rs
@@ -0,0 +1,100 @@
+use super::{Error, NixDeserialize, NixRead};
+
+impl NixDeserialize for u64 {
+    async fn try_deserialize<R>(reader: &mut R) -> Result<Option<Self>, R::Error>
+    where
+        R: ?Sized + NixRead + Send,
+    {
+        reader.try_read_number().await
+    }
+}
+
+impl NixDeserialize for usize {
+    async fn try_deserialize<R>(reader: &mut R) -> Result<Option<Self>, R::Error>
+    where
+        R: ?Sized + NixRead + Send,
+    {
+        if let Some(value) = reader.try_read_number().await? {
+            value.try_into().map_err(R::Error::invalid_data).map(Some)
+        } else {
+            Ok(None)
+        }
+    }
+}
+
+impl NixDeserialize for bool {
+    async fn try_deserialize<R>(reader: &mut R) -> Result<Option<Self>, R::Error>
+    where
+        R: ?Sized + NixRead + Send,
+    {
+        Ok(reader.try_read_number().await?.map(|v| v != 0))
+    }
+}
+impl NixDeserialize for i64 {
+    async fn try_deserialize<R>(reader: &mut R) -> Result<Option<Self>, R::Error>
+    where
+        R: ?Sized + NixRead + Send,
+    {
+        Ok(reader.try_read_number().await?.map(|v| v as i64))
+    }
+}
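// Note: booleans are carried as a full u64 and any non-zero value reads as
// `true` (see the `other_true` case below), while i64 reinterprets the raw
// u64 bits via an `as` cast (two's complement).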
+
+#[cfg(test)]
+mod test {
+    use hex_literal::hex;
+    use rstest::rstest;
+    use tokio_test::io::Builder;
+
+    use crate::nix_daemon::de::{NixRead, NixReader};
+
+    #[rstest]
+    #[case::simple_false(false, &hex!("0000 0000 0000 0000"))]
+    #[case::simple_true(true, &hex!("0100 0000 0000 0000"))]
+    #[case::other_true(true, &hex!("1234 5600 0000 0000"))]
+    #[case::max_true(true, &hex!("FFFF FFFF FFFF FFFF"))]
+    #[tokio::test]
+    async fn test_read_bool(#[case] expected: bool, #[case] data: &[u8]) {
+        let mock = Builder::new().read(data).build();
+        let mut reader = NixReader::new(mock);
+        let actual: bool = reader.read_value().await.unwrap();
+        assert_eq!(actual, expected);
+    }
+
+    #[rstest]
+    #[case::zero(0, &hex!("0000 0000 0000 0000"))]
+    #[case::one(1, &hex!("0100 0000 0000 0000"))]
+    #[case::other(0x563412, &hex!("1234 5600 0000 0000"))]
+    #[case::max_value(u64::MAX, &hex!("FFFF FFFF FFFF FFFF"))]
+    #[tokio::test]
+    async fn test_read_u64(#[case] expected: u64, #[case] data: &[u8]) {
+        let mock = Builder::new().read(data).build();
+        let mut reader = NixReader::new(mock);
+        let actual: u64 = reader.read_value().await.unwrap();
+        assert_eq!(actual, expected);
+    }
+
+    #[rstest]
+    #[case::zero(0, &hex!("0000 0000 0000 0000"))]
+    #[case::one(1, &hex!("0100 0000 0000 0000"))]
+    #[case::other(0x563412, &hex!("1234 5600 0000 0000"))]
+    #[case::max_value(usize::MAX, &usize::MAX.to_le_bytes())]
+    #[tokio::test]
+    async fn test_read_usize(#[case] expected: usize, #[case] data: &[u8]) {
+        let mock = Builder::new().read(data).build();
+        let mut reader = NixReader::new(mock);
+        let actual: usize = reader.read_value().await.unwrap();
+        assert_eq!(actual, expected);
+    }
+
+    // FUTUREWORK: Test this on supported hardware
+    #[tokio::test]
+    #[cfg(any(target_pointer_width = "16", target_pointer_width = "32"))]
+    async fn test_read_usize_overflow() {
+        let mock = Builder::new().read(&u64::MAX.to_le_bytes()).build();
+        let mut reader = NixReader::new(mock);
+        assert_eq!(
+            std::io::ErrorKind::InvalidData,
+            reader.read_value::<usize>().await.unwrap_err().kind()
+        );
+    }
+}
diff --git a/tvix/nix-compat/src/nix_daemon/de/mock.rs b/tvix/nix-compat/src/nix_daemon/de/mock.rs
new file mode 100644
index 000000000000..31cc3a4897ba
--- /dev/null
+++ b/tvix/nix-compat/src/nix_daemon/de/mock.rs
@@ -0,0 +1,261 @@
+use std::collections::VecDeque;
+use std::fmt;
+use std::io;
+use std::thread;
+
+use bytes::Bytes;
+use thiserror::Error;
+
+use crate::nix_daemon::ProtocolVersion;
+
+use super::NixRead;
+
+#[derive(Debug, Error, PartialEq, Eq, Clone)]
+pub enum Error {
+    #[error("custom error '{0}'")]
+    Custom(String),
+    #[error("invalid data '{0}'")]
+    InvalidData(String),
+    #[error("missing data '{0}'")]
+    MissingData(String),
+    #[error("IO error {0} '{1}'")]
+    IO(io::ErrorKind, String),
+    #[error("wrong read: expected {0} got {1}")]
+    WrongRead(OperationType, OperationType),
+}
+
+impl Error {
+    pub fn expected_read_number() -> Error {
+        Error::WrongRead(OperationType::ReadNumber, OperationType::ReadBytes)
+    }
+
+    pub fn expected_read_bytes() -> Error {
+        Error::WrongRead(OperationType::ReadBytes, OperationType::ReadNumber)
+    }
+}
+
+impl super::Error for Error {
+    fn custom<T: fmt::Display>(msg: T) -> Self {
+        Self::Custom(msg.to_string())
+    }
+
+    fn io_error(err: std::io::Error) -> Self {
+        Self::IO(err.kind(), err.to_string())
+    }
+
+    fn invalid_data<T: fmt::Display>(msg: T) -> Self {
+        Self::InvalidData(msg.to_string())
+    }
+
+    fn missing_data<T: fmt::Display>(msg: T) -> Self {
+        Self::MissingData(msg.to_string())
+    }
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub enum OperationType {
+    ReadNumber,
+    ReadBytes,
+}
+
+impl fmt::Display for OperationType {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        match self {
+            Self::ReadNumber => write!(f, "read_number"),
+            Self::ReadBytes => write!(f, "read_bytes"),
+        }
+    }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+enum Operation {
+    ReadNumber(Result<u64, Error>),
+    ReadBytes(Result<Bytes, Error>),
+}
+
+impl From<Operation> for OperationType {
+    fn from(value: Operation) -> Self {
+        match value {
+            Operation::ReadNumber(_) => OperationType::ReadNumber,
+            Operation::ReadBytes(_) => OperationType::ReadBytes,
+        }
+    }
+}
+
+pub struct Builder {
+    version: ProtocolVersion,
+    ops: VecDeque<Operation>,
+}
+
+impl Builder {
+    pub fn new() -> Builder {
+        Builder {
+            version: Default::default(),
+            ops: VecDeque::new(),
+        }
+    }
+
+    pub fn version<V: Into<ProtocolVersion>>(&mut self, version: V) -> &mut Self {
+        self.version = version.into();
+        self
+    }
+
+    pub fn read_number(&mut self, value: u64) -> &mut Self {
+        self.ops.push_back(Operation::ReadNumber(Ok(value)));
+        self
+    }
+
+    pub fn read_number_error(&mut self, err: Error) -> &mut Self {
+        self.ops.push_back(Operation::ReadNumber(Err(err)));
+        self
+    }
+
+    pub fn read_bytes(&mut self, value: Bytes) -> &mut Self {
+        self.ops.push_back(Operation::ReadBytes(Ok(value)));
+        self
+    }
+
+    pub fn read_slice(&mut self, data: &[u8]) -> &mut Self {
+        let value = Bytes::copy_from_slice(data);
+        self.ops.push_back(Operation::ReadBytes(Ok(value)));
+        self
+    }
+
+    pub fn read_bytes_error(&mut self, err: Error) -> &mut Self {
+        self.ops.push_back(Operation::ReadBytes(Err(err)));
+        self
+    }
+
+    pub fn build(&mut self) -> Mock {
+        Mock {
+            version: self.version,
+            ops: self.ops.clone(),
+        }
+    }
+}
+
+impl Default for Builder {
+    fn default() -> Self {
+        Self::new()
+    }
+}
+
+pub struct Mock {
+    version: ProtocolVersion,
+    ops: VecDeque<Operation>,
+}
+
+impl NixRead for Mock {
+    type Error = Error;
+
+    fn version(&self) -> ProtocolVersion {
+        self.version
+    }
+
+    async fn try_read_number(&mut self) -> Result<Option<u64>, Self::Error> {
+        match self.ops.pop_front() {
+            Some(Operation::ReadNumber(ret)) => ret.map(Some),
+            Some(Operation::ReadBytes(_)) => Err(Error::expected_read_bytes()),
+            None => Ok(None),
+        }
+    }
+
+    async fn try_read_bytes_limited(
+        &mut self,
+        _limit: std::ops::RangeInclusive<usize>,
+    ) -> Result<Option<Bytes>, Self::Error> {
+        match self.ops.pop_front() {
+            Some(Operation::ReadBytes(ret)) => ret.map(Some),
+            Some(Operation::ReadNumber(_)) => Err(Error::expected_read_number()),
+            None => Ok(None),
+        }
+    }
+}
+
+impl Drop for Mock {
+    fn drop(&mut self) {
+        // No need to panic again
+        if thread::panicking() {
+            return;
+        }
+        if let Some(op) = self.ops.front() {
+            panic!("reader dropped with {op:?} operation still unread")
+        }
+    }
+}
+
+#[cfg(test)]
+mod test {
+    use bytes::Bytes;
+    use hex_literal::hex;
+
+    use crate::nix_daemon::de::NixRead;
+
+    use super::{Builder, Error};
+
+    #[tokio::test]
+    async fn read_slice() {
+        let mut mock = Builder::new()
+            .read_number(10)
+            .read_slice(&[])
+            .read_slice(&hex!("0000 1234 5678 9ABC DEFF"))
+            .build();
+        assert_eq!(10, mock.read_number().await.unwrap());
+        assert_eq!(&[] as &[u8], &mock.read_bytes().await.unwrap()[..]);
+        assert_eq!(
+            &hex!("0000 1234 5678 9ABC DEFF"),
+            &mock.read_bytes().await.unwrap()[..]
+        );
+        assert_eq!(None, mock.try_read_number().await.unwrap());
+        assert_eq!(None, mock.try_read_bytes().await.unwrap());
+    }
+
+    #[tokio::test]
+    async fn read_bytes() {
+        let mut mock = Builder::new()
+            .read_number(10)
+            .read_bytes(Bytes::from_static(&[]))
+            .read_bytes(Bytes::from_static(&hex!("0000 1234 5678 9ABC DEFF")))
+            .build();
+        assert_eq!(10, mock.read_number().await.unwrap());
+        assert_eq!(&[] as &[u8], &mock.read_bytes().await.unwrap()[..]);
+        assert_eq!(
+            &hex!("0000 1234 5678 9ABC DEFF"),
+            &mock.read_bytes().await.unwrap()[..]
+        );
+        assert_eq!(None, mock.try_read_number().await.unwrap());
+        assert_eq!(None, mock.try_read_bytes().await.unwrap());
+    }
+
+    #[tokio::test]
+    async fn read_number() {
+        let mut mock = Builder::new().read_number(10).build();
+        assert_eq!(10, mock.read_number().await.unwrap());
+        assert_eq!(None, mock.try_read_number().await.unwrap());
+        assert_eq!(None, mock.try_read_bytes().await.unwrap());
+    }
+
+    #[tokio::test]
+    async fn expect_number() {
+        let mut mock = Builder::new().read_number(10).build();
+        assert_eq!(
+            Error::expected_read_number(),
+            mock.read_bytes().await.unwrap_err()
+        );
+    }
+
+    #[tokio::test]
+    async fn expect_bytes() {
+        let mut mock = Builder::new().read_slice(&[]).build();
+        assert_eq!(
+            Error::expected_read_bytes(),
+            mock.read_number().await.unwrap_err()
+        );
+    }
+
+    #[test]
+    #[should_panic]
+    fn operations_left() {
+        let _ = Builder::new().read_number(10).build();
+    }
+}
diff --git a/tvix/nix-compat/src/nix_daemon/de/mod.rs b/tvix/nix-compat/src/nix_daemon/de/mod.rs
new file mode 100644
index 000000000000..f85ccd8fea0e
--- /dev/null
+++ b/tvix/nix-compat/src/nix_daemon/de/mod.rs
@@ -0,0 +1,225 @@
+use std::error::Error as StdError;
+use std::future::Future;
+use std::ops::RangeInclusive;
+use std::{fmt, io};
+
+use ::bytes::Bytes;
+
+use super::ProtocolVersion;
+
+mod bytes;
+mod collections;
+mod int;
+#[cfg(any(test, feature = "test"))]
+pub mod mock;
+mod reader;
+
+pub use reader::{NixReader, NixReaderBuilder};
+
+/// Like serde the `Error` trait allows `NixRead` implementations to add
+/// custom error handling for `NixDeserialize`.
+pub trait Error: Sized + StdError {
+    /// A totally custom non-specific error.
+    fn custom<T: fmt::Display>(msg: T) -> Self;
+
+    /// Some kind of std::io::Error occurred.
+    fn io_error(err: std::io::Error) -> Self {
+        Self::custom(format_args!("There was an I/O error {}", err))
+    }
+
+    /// The data read from `NixRead` is invalid.
+    /// This could be that some bytes were supposed to be valid UTF-8 but weren't.
+    fn invalid_data<T: fmt::Display>(msg: T) -> Self {
+        Self::custom(msg)
+    }
+
+    /// Required data is missing. This mostly corresponds to an unexpected EOF.
+    fn missing_data<T: fmt::Display>(msg: T) -> Self {
+        Self::custom(msg)
+    }
+}
+
+impl Error for io::Error {
+    fn custom<T: fmt::Display>(msg: T) -> Self {
+        io::Error::new(io::ErrorKind::Other, msg.to_string())
+    }
+
+    fn io_error(err: std::io::Error) -> Self {
+        err
+    }
+
+    fn invalid_data<T: fmt::Display>(msg: T) -> Self {
+        io::Error::new(io::ErrorKind::InvalidData, msg.to_string())
+    }
+
+    fn missing_data<T: fmt::Display>(msg: T) -> Self {
+        io::Error::new(io::ErrorKind::UnexpectedEof, msg.to_string())
+    }
+}
+
+/// A reader of data from the Nix daemon protocol.
+/// There are two primitive types in the Nix daemon protocol, a u64 and a
+/// bytes buffer; everything else is more or less built on top of these
+/// two types.
+pub trait NixRead: Send {
+    type Error: Error + Send;
+
+    /// Some types are serialized differently depending on the version
+    /// of the protocol and so this can be used for implementing that.
+    fn version(&self) -> ProtocolVersion;
+
+    /// Read a single u64 from the protocol.
+    /// This returns an Option to support graceful shutdown.
+    fn try_read_number(
+        &mut self,
+    ) -> impl Future<Output = Result<Option<u64>, Self::Error>> + Send + '_;
+
+    /// Read bytes from the protocol.
+    /// A size limit on the returned bytes has to be specified.
+    /// This returns an Option to support graceful shutdown.
+    fn try_read_bytes_limited(
+        &mut self,
+        limit: RangeInclusive<usize>,
+    ) -> impl Future<Output = Result<Option<Bytes>, Self::Error>> + Send + '_;
+
+    /// Read bytes from the protocol without a limit.
+    /// The default implementation just calls `try_read_bytes_limited` with a
+    /// limit of `0..=usize::MAX` but other implementations are free to have a
+    /// reader-wide limit.
+    /// This returns an Option to support graceful shutdown.
+    fn try_read_bytes(
+        &mut self,
+    ) -> impl Future<Output = Result<Option<Bytes>, Self::Error>> + Send + '_ {
+        self.try_read_bytes_limited(0..=usize::MAX)
+    }
+
+    /// Read a single u64 from the protocol.
+    /// This will return an error if the number could not be read.
+    fn read_number(&mut self) -> impl Future<Output = Result<u64, Self::Error>> + Send + '_ {
+        async move {
+            match self.try_read_number().await? {
+                Some(v) => Ok(v),
+                None => Err(Self::Error::missing_data("unexpected end-of-file")),
+            }
+        }
+    }
+
+    /// Read bytes from the protocol.
+    /// A size limit on the returned bytes has to be specified.
+    /// This will return an error if the bytes could not be read.
+    fn read_bytes_limited(
+        &mut self,
+        limit: RangeInclusive<usize>,
+    ) -> impl Future<Output = Result<Bytes, Self::Error>> + Send + '_ {
+        async move {
+            match self.try_read_bytes_limited(limit).await? {
+                Some(v) => Ok(v),
+                None => Err(Self::Error::missing_data("unexpected end-of-file")),
+            }
+        }
+    }
+
+    /// Read bytes from the protocol.
+    /// The default implementation just calls `read_bytes_limited` with a
+    /// limit of `0..=usize::MAX` but other implementations are free to have a
+    /// reader-wide limit.
+    /// This will return an error if the bytes could not be read.
+    fn read_bytes(&mut self) -> impl Future<Output = Result<Bytes, Self::Error>> + Send + '_ {
+        self.read_bytes_limited(0..=usize::MAX)
+    }
+
+    /// Read a value from the protocol.
+    /// Uses `NixDeserialize::deserialize` to read a value.
+    fn read_value<V: NixDeserialize>(
+        &mut self,
+    ) -> impl Future<Output = Result<V, Self::Error>> + Send + '_ {
+        V::deserialize(self)
+    }
+
+    /// Read a value from the protocol.
+    /// Uses `NixDeserialize::try_deserialize` to read a value.
+    /// This returns an Option to support graceful shutdown.
+    fn try_read_value<V: NixDeserialize>(
+        &mut self,
+    ) -> impl Future<Output = Result<Option<V>, Self::Error>> + Send + '_ {
+        V::try_deserialize(self)
+    }
+}
+
+impl<T: ?Sized + NixRead> NixRead for &mut T {
+    type Error = T::Error;
+
+    fn version(&self) -> ProtocolVersion {
+        (**self).version()
+    }
+
+    fn try_read_number(
+        &mut self,
+    ) -> impl Future<Output = Result<Option<u64>, Self::Error>> + Send + '_ {
+        (**self).try_read_number()
+    }
+
+    fn try_read_bytes_limited(
+        &mut self,
+        limit: RangeInclusive<usize>,
+    ) -> impl Future<Output = Result<Option<Bytes>, Self::Error>> + Send + '_ {
+        (**self).try_read_bytes_limited(limit)
+    }
+
+    fn try_read_bytes(
+        &mut self,
+    ) -> impl Future<Output = Result<Option<Bytes>, Self::Error>> + Send + '_ {
+        (**self).try_read_bytes()
+    }
+
+    fn read_number(&mut self) -> impl Future<Output = Result<u64, Self::Error>> + Send + '_ {
+        (**self).read_number()
+    }
+
+    fn read_bytes_limited(
+        &mut self,
+        limit: RangeInclusive<usize>,
+    ) -> impl Future<Output = Result<Bytes, Self::Error>> + Send + '_ {
+        (**self).read_bytes_limited(limit)
+    }
+
+    fn read_bytes(&mut self) -> impl Future<Output = Result<Bytes, Self::Error>> + Send + '_ {
+        (**self).read_bytes()
+    }
+
+    fn try_read_value<V: NixDeserialize>(
+        &mut self,
+    ) -> impl Future<Output = Result<Option<V>, Self::Error>> + Send + '_ {
+        (**self).try_read_value()
+    }
+
+    fn read_value<V: NixDeserialize>(
+        &mut self,
+    ) -> impl Future<Output = Result<V, Self::Error>> + Send + '_ {
+        (**self).read_value()
+    }
+}
+
+/// A data structure that can be deserialized from the Nix daemon
+/// worker protocol.
+pub trait NixDeserialize: Sized {
+    /// Read a value from the reader.
+    /// This returns an Option to support graceful shutdown.
+    fn try_deserialize<R>(
+        reader: &mut R,
+    ) -> impl Future<Output = Result<Option<Self>, R::Error>> + Send + '_
+    where
+        R: ?Sized + NixRead + Send;
+
+    fn deserialize<R>(reader: &mut R) -> impl Future<Output = Result<Self, R::Error>> + Send + '_
+    where
+        R: ?Sized + NixRead + Send,
+    {
+        async move {
+            match Self::try_deserialize(reader).await? {
+                Some(v) => Ok(v),
+                None => Err(R::Error::missing_data("unexpected end-of-file")),
+            }
+        }
+    }
+}
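// Sketch of a hand-written implementation for a hypothetical type, reading its
// fields in order like the primitive impls above (`StorePathStats` and its
// fields are illustrative only, not part of this change):
//
//     struct StorePathStats {
//         path: String,
//         nar_size: u64,
//     }
//
//     impl NixDeserialize for StorePathStats {
//         async fn try_deserialize<R>(reader: &mut R) -> Result<Option<Self>, R::Error>
//         where
//             R: ?Sized + NixRead + Send,
//         {
//             // EOF before the first field is a graceful end-of-stream.
//             let Some(path) = reader.try_read_value().await? else {
//                 return Ok(None);
//             };
//             let nar_size = reader.read_value().await?;
//             Ok(Some(StorePathStats { path, nar_size }))
//         }
//     }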
diff --git a/tvix/nix-compat/src/nix_daemon/de/reader.rs b/tvix/nix-compat/src/nix_daemon/de/reader.rs
new file mode 100644
index 000000000000..87c623b2220c
--- /dev/null
+++ b/tvix/nix-compat/src/nix_daemon/de/reader.rs
@@ -0,0 +1,527 @@
+use std::future::poll_fn;
+use std::io::{self, Cursor};
+use std::ops::RangeInclusive;
+use std::pin::Pin;
+use std::task::{ready, Context, Poll};
+
+use bytes::{Buf, BufMut, Bytes, BytesMut};
+use pin_project_lite::pin_project;
+use tokio::io::{AsyncBufRead, AsyncRead, AsyncReadExt, ReadBuf};
+
+use crate::nix_daemon::ProtocolVersion;
+use crate::wire::EMPTY_BYTES;
+
+use super::{Error, NixRead};
+
+pub struct NixReaderBuilder {
+    buf: Option<BytesMut>,
+    reserved_buf_size: usize,
+    max_buf_size: usize,
+    version: ProtocolVersion,
+}
+
+impl Default for NixReaderBuilder {
+    fn default() -> Self {
+        Self {
+            buf: Default::default(),
+            reserved_buf_size: 8192,
+            max_buf_size: 8192,
+            version: Default::default(),
+        }
+    }
+}
+
+impl NixReaderBuilder {
+    pub fn set_buffer(mut self, buf: BytesMut) -> Self {
+        self.buf = Some(buf);
+        self
+    }
+
+    pub fn set_reserved_buf_size(mut self, size: usize) -> Self {
+        self.reserved_buf_size = size;
+        self
+    }
+
+    pub fn set_max_buf_size(mut self, size: usize) -> Self {
+        self.max_buf_size = size;
+        self
+    }
+
+    pub fn set_version(mut self, version: ProtocolVersion) -> Self {
+        self.version = version;
+        self
+    }
+
+    pub fn build<R>(self, reader: R) -> NixReader<R> {
+        let buf = self.buf.unwrap_or_else(|| BytesMut::with_capacity(0));
+        NixReader {
+            buf,
+            inner: reader,
+            reserved_buf_size: self.reserved_buf_size,
+            max_buf_size: self.max_buf_size,
+            version: self.version,
+        }
+    }
+}
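// Builder usage sketch (`stream` stands in for any AsyncRead transport):
//
//     let reader = NixReader::builder()
//         .set_max_buf_size(1024)
//         .set_version(ProtocolVersion::from_parts(1, 35))
//         .build(stream);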
+
+pin_project! {
+    pub struct NixReader<R> {
+        #[pin]
+        inner: R,
+        buf: BytesMut,
+        reserved_buf_size: usize,
+        max_buf_size: usize,
+        version: ProtocolVersion,
+    }
+}
+
+impl NixReader<Cursor<Vec<u8>>> {
+    pub fn builder() -> NixReaderBuilder {
+        NixReaderBuilder::default()
+    }
+}
+
+impl<R> NixReader<R>
+where
+    R: AsyncReadExt,
+{
+    pub fn new(reader: R) -> NixReader<R> {
+        NixReader::builder().build(reader)
+    }
+
+    pub fn buffer(&self) -> &[u8] {
+        &self.buf[..]
+    }
+
+    #[cfg(test)]
+    pub(crate) fn buffer_mut(&mut self) -> &mut BytesMut {
+        &mut self.buf
+    }
+
+    /// Remaining capacity in internal buffer
+    pub fn remaining_mut(&self) -> usize {
+        self.buf.capacity() - self.buf.len()
+    }
+
+    fn poll_force_fill_buf(
+        mut self: Pin<&mut Self>,
+        cx: &mut Context<'_>,
+    ) -> Poll<io::Result<usize>> {
+        // Ensure that buffer has space for at least reserved_buf_size bytes
+        if self.remaining_mut() < self.reserved_buf_size {
+            let me = self.as_mut().project();
+            me.buf.reserve(*me.reserved_buf_size);
+        }
+        let me = self.project();
+        let n = {
+            let dst = me.buf.spare_capacity_mut();
+            let mut buf = ReadBuf::uninit(dst);
+            let ptr = buf.filled().as_ptr();
+            ready!(me.inner.poll_read(cx, &mut buf)?);
+
+            // Ensure the pointer does not change from under us
+            assert_eq!(ptr, buf.filled().as_ptr());
+            buf.filled().len()
+        };
+
+        // SAFETY: This is guaranteed to be the number of initialized (and read)
+        // bytes due to the invariants provided by `ReadBuf::filled`.
+        unsafe {
+            me.buf.advance_mut(n);
+        }
+        Poll::Ready(Ok(n))
+    }
+}
+
+impl<R> NixReader<R>
+where
+    R: AsyncReadExt + Unpin,
+{
+    async fn force_fill(&mut self) -> io::Result<usize> {
+        let mut p = Pin::new(self);
+        let read = poll_fn(|cx| p.as_mut().poll_force_fill_buf(cx)).await?;
+        Ok(read)
+    }
+}
+
+impl<R> NixRead for NixReader<R>
+where
+    R: AsyncReadExt + Send + Unpin,
+{
+    type Error = io::Error;
+
+    fn version(&self) -> ProtocolVersion {
+        self.version
+    }
+
+    async fn try_read_number(&mut self) -> Result<Option<u64>, Self::Error> {
+        let mut buf = [0u8; 8];
+        let read = self.read_buf(&mut &mut buf[..]).await?;
+        if read == 0 {
+            return Ok(None);
+        }
+        if read < 8 {
+            self.read_exact(&mut buf[read..]).await?;
+        }
+        let num = Buf::get_u64_le(&mut &buf[..]);
+        Ok(Some(num))
+    }
+
+    async fn try_read_bytes_limited(
+        &mut self,
+        limit: RangeInclusive<usize>,
+    ) -> Result<Option<Bytes>, Self::Error> {
+        assert!(
+            *limit.end() <= self.max_buf_size,
+            "The limit must be smaller than {}",
+            self.max_buf_size
+        );
+        match self.try_read_number().await? {
+            Some(raw_len) => {
+                // Check that length is in range and convert to usize
+                let len = raw_len
+                    .try_into()
+                    .ok()
+                    .filter(|v| limit.contains(v))
+                    .ok_or_else(|| Self::Error::invalid_data("bytes length out of range"))?;
+
+                // Calculate 64bit aligned length and convert to usize
+                let aligned: usize = raw_len
+                    .checked_add(7)
+                    .map(|v| v & !7)
+                    .ok_or_else(|| Self::Error::invalid_data("bytes length out of range"))?
+                    .try_into()
+                    .map_err(Self::Error::invalid_data)?;
+
+                // Ensure that there is enough space in buffer for contents
+                if self.buf.len() + self.remaining_mut() < aligned {
+                    self.buf.reserve(aligned - self.buf.len());
+                }
+                while self.buf.len() < aligned {
+                    if self.force_fill().await? == 0 {
+                        return Err(Self::Error::missing_data(
+                            "unexpected end-of-file reading bytes",
+                        ));
+                    }
+                }
+                let mut contents = self.buf.split_to(aligned);
+
+                let padding = aligned - len;
+                // Ensure padding is all zeros
+                if contents[len..] != EMPTY_BYTES[..padding] {
+                    return Err(Self::Error::invalid_data("non-zero padding"));
+                }
+
+                contents.truncate(len);
+                Ok(Some(contents.freeze()))
+            }
+            None => Ok(None),
+        }
+    }
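// Worked example: a 3-byte payload such as b"tea" arrives on the wire as
//
//     0300 0000 0000 0000  7465 6100 0000 0000
//
// raw_len = 3, aligned = (3 + 7) & !7 = 8, so the 5 padding bytes must be
// zero and the returned Bytes is truncated back to 3.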
+
+    fn try_read_bytes(
+        &mut self,
+    ) -> impl std::future::Future<Output = Result<Option<Bytes>, Self::Error>> + Send + '_ {
+        self.try_read_bytes_limited(0..=self.max_buf_size)
+    }
+
+    fn read_bytes(
+        &mut self,
+    ) -> impl std::future::Future<Output = Result<Bytes, Self::Error>> + Send + '_ {
+        self.read_bytes_limited(0..=self.max_buf_size)
+    }
+}
+
+impl<R: AsyncRead> AsyncRead for NixReader<R> {
+    fn poll_read(
+        mut self: Pin<&mut Self>,
+        cx: &mut Context<'_>,
+        buf: &mut ReadBuf<'_>,
+    ) -> Poll<io::Result<()>> {
+        let rem = ready!(self.as_mut().poll_fill_buf(cx))?;
+        let amt = std::cmp::min(rem.len(), buf.remaining());
+        buf.put_slice(&rem[0..amt]);
+        self.consume(amt);
+        Poll::Ready(Ok(()))
+    }
+}
+
+impl<R: AsyncRead> AsyncBufRead for NixReader<R> {
+    fn poll_fill_buf(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<io::Result<&[u8]>> {
+        if self.as_ref().project_ref().buf.is_empty() {
+            ready!(self.as_mut().poll_force_fill_buf(cx))?;
+        }
+        let me = self.project();
+        Poll::Ready(Ok(&me.buf[..]))
+    }
+
+    fn consume(self: Pin<&mut Self>, amt: usize) {
+        let me = self.project();
+        me.buf.advance(amt)
+    }
+}
+
+#[cfg(test)]
+mod test {
+    use std::time::Duration;
+
+    use hex_literal::hex;
+    use rstest::rstest;
+    use tokio_test::io::Builder;
+
+    use super::*;
+    use crate::nix_daemon::de::NixRead;
+
+    #[tokio::test]
+    async fn test_read_u64() {
+        let mock = Builder::new().read(&hex!("0100 0000 0000 0000")).build();
+        let mut reader = NixReader::new(mock);
+
+        assert_eq!(1, reader.read_number().await.unwrap());
+        assert_eq!(hex!(""), reader.buffer());
+
+        let mut buf = Vec::new();
+        reader.read_to_end(&mut buf).await.unwrap();
+        assert_eq!(hex!(""), &buf[..]);
+    }
+
+    #[tokio::test]
+    async fn test_read_u64_rest() {
+        let mock = Builder::new()
+            .read(&hex!("0100 0000 0000 0000 0123 4567 89AB CDEF"))
+            .build();
+        let mut reader = NixReader::new(mock);
+
+        assert_eq!(1, reader.read_number().await.unwrap());
+        assert_eq!(hex!("0123 4567 89AB CDEF"), reader.buffer());
+
+        let mut buf = Vec::new();
+        reader.read_to_end(&mut buf).await.unwrap();
+        assert_eq!(hex!("0123 4567 89AB CDEF"), &buf[..]);
+    }
+
+    #[tokio::test]
+    async fn test_read_u64_partial() {
+        let mock = Builder::new()
+            .read(&hex!("0100 0000"))
+            .wait(Duration::ZERO)
+            .read(&hex!("0000 0000 0123 4567 89AB CDEF"))
+            .wait(Duration::ZERO)
+            .read(&hex!("0100 0000"))
+            .build();
+        let mut reader = NixReader::new(mock);
+
+        assert_eq!(1, reader.read_number().await.unwrap());
+        assert_eq!(hex!("0123 4567 89AB CDEF"), reader.buffer());
+
+        let mut buf = Vec::new();
+        reader.read_to_end(&mut buf).await.unwrap();
+        assert_eq!(hex!("0123 4567 89AB CDEF 0100 0000"), &buf[..]);
+    }
+
+    #[tokio::test]
+    async fn test_read_u64_eof() {
+        let mock = Builder::new().build();
+        let mut reader = NixReader::new(mock);
+
+        assert_eq!(
+            io::ErrorKind::UnexpectedEof,
+            reader.read_number().await.unwrap_err().kind()
+        );
+    }
+
+    #[tokio::test]
+    async fn test_try_read_u64_none() {
+        let mock = Builder::new().build();
+        let mut reader = NixReader::new(mock);
+
+        assert_eq!(None, reader.try_read_number().await.unwrap());
+    }
+
+    #[tokio::test]
+    async fn test_try_read_u64_eof() {
+        let mock = Builder::new().read(&hex!("0100 0000 0000")).build();
+        let mut reader = NixReader::new(mock);
+
+        assert_eq!(
+            io::ErrorKind::UnexpectedEof,
+            reader.try_read_number().await.unwrap_err().kind()
+        );
+    }
+
+    #[tokio::test]
+    async fn test_try_read_u64_eof2() {
+        let mock = Builder::new()
+            .read(&hex!("0100"))
+            .wait(Duration::ZERO)
+            .read(&hex!("0000 0000"))
+            .build();
+        let mut reader = NixReader::new(mock);
+
+        assert_eq!(
+            io::ErrorKind::UnexpectedEof,
+            reader.try_read_number().await.unwrap_err().kind()
+        );
+    }
+
+    #[rstest]
+    #[case::empty(b"", &hex!("0000 0000 0000 0000"))]
+    #[case::one(b")", &hex!("0100 0000 0000 0000 2900 0000 0000 0000"))]
+    #[case::two(b"it", &hex!("0200 0000 0000 0000 6974 0000 0000 0000"))]
+    #[case::three(b"tea", &hex!("0300 0000 0000 0000 7465 6100 0000 0000"))]
+    #[case::four(b"were", &hex!("0400 0000 0000 0000 7765 7265 0000 0000"))]
+    #[case::five(b"where", &hex!("0500 0000 0000 0000 7768 6572 6500 0000"))]
+    #[case::six(b"unwrap", &hex!("0600 0000 0000 0000 756E 7772 6170 0000"))]
+    #[case::seven(b"where's", &hex!("0700 0000 0000 0000 7768 6572 6527 7300"))]
+    #[case::aligned(b"read_tea", &hex!("0800 0000 0000 0000 7265 6164 5F74 6561"))]
+    #[case::more_bytes(b"read_tess", &hex!("0900 0000 0000 0000 7265 6164 5F74 6573 7300 0000 0000 0000"))]
+    #[tokio::test]
+    async fn test_read_bytes(#[case] expected: &[u8], #[case] data: &[u8]) {
+        let mock = Builder::new().read(data).build();
+        let mut reader = NixReader::new(mock);
+        let actual = reader.read_bytes().await.unwrap();
+        assert_eq!(&actual[..], expected);
+    }
+
+    #[tokio::test]
+    async fn test_read_bytes_empty() {
+        let mock = Builder::new().build();
+        let mut reader = NixReader::new(mock);
+
+        assert_eq!(
+            io::ErrorKind::UnexpectedEof,
+            reader.read_bytes().await.unwrap_err().kind()
+        );
+    }
+
+    #[tokio::test]
+    async fn test_try_read_bytes_none() {
+        let mock = Builder::new().build();
+        let mut reader = NixReader::new(mock);
+
+        assert_eq!(None, reader.try_read_bytes().await.unwrap());
+    }
+
+    #[tokio::test]
+    async fn test_try_read_bytes_missing_data() {
+        let mock = Builder::new()
+            .read(&hex!("0500"))
+            .wait(Duration::ZERO)
+            .read(&hex!("0000 0000"))
+            .build();
+        let mut reader = NixReader::new(mock);
+
+        assert_eq!(
+            io::ErrorKind::UnexpectedEof,
+            reader.try_read_bytes().await.unwrap_err().kind()
+        );
+    }
+
+    #[tokio::test]
+    async fn test_try_read_bytes_missing_padding() {
+        let mock = Builder::new()
+            .read(&hex!("0200 0000 0000 0000"))
+            .wait(Duration::ZERO)
+            .read(&hex!("1234"))
+            .build();
+        let mut reader = NixReader::new(mock);
+
+        assert_eq!(
+            io::ErrorKind::UnexpectedEof,
+            reader.try_read_bytes().await.unwrap_err().kind()
+        );
+    }
+
+    #[tokio::test]
+    async fn test_read_bytes_bad_padding() {
+        let mock = Builder::new()
+            .read(&hex!("0200 0000 0000 0000"))
+            .wait(Duration::ZERO)
+            .read(&hex!("1234 0100 0000 0000"))
+            .build();
+        let mut reader = NixReader::new(mock);
+
+        assert_eq!(
+            io::ErrorKind::InvalidData,
+            reader.read_bytes().await.unwrap_err().kind()
+        );
+    }
+
+    #[tokio::test]
+    async fn test_read_bytes_limited_out_of_range() {
+        let mock = Builder::new().read(&hex!("FFFF 0000 0000 0000")).build();
+        let mut reader = NixReader::new(mock);
+
+        assert_eq!(
+            io::ErrorKind::InvalidData,
+            reader.read_bytes_limited(0..=50).await.unwrap_err().kind()
+        );
+    }
+
+    #[tokio::test]
+    async fn test_read_bytes_length_overflow() {
+        let mock = Builder::new().read(&hex!("F9FF FFFF FFFF FFFF")).build();
+        let mut reader = NixReader::builder()
+            .set_max_buf_size(usize::MAX)
+            .build(mock);
+
+        assert_eq!(
+            io::ErrorKind::InvalidData,
+            reader
+                .read_bytes_limited(0..=usize::MAX)
+                .await
+                .unwrap_err()
+                .kind()
+        );
+    }
+
+    // FUTUREWORK: Test this on supported hardware
+    #[tokio::test]
+    #[cfg(any(target_pointer_width = "16", target_pointer_width = "32"))]
+    async fn test_bytes_length_conversion_overflow() {
+        let len = (usize::MAX as u64) + 1;
+        let mock = Builder::new().read(&len.to_le_bytes()).build();
+        let mut reader = NixReader::new(mock);
+        assert_eq!(
+            std::io::ErrorKind::InvalidData,
+            reader.read_value::<usize>().await.unwrap_err().kind()
+        );
+    }
+
+    // FUTUREWORK: Test this on supported hardware
+    #[tokio::test]
+    #[cfg(any(target_pointer_width = "16", target_pointer_width = "32"))]
+    async fn test_bytes_aligned_length_conversion_overflow() {
+        let len = (usize::MAX - 6) as u64;
+        let mock = Builder::new().read(&len.to_le_bytes()).build();
+        let mut reader = NixReader::new(mock);
+        assert_eq!(
+            std::io::ErrorKind::InvalidData,
+            reader.read_value::<usize>().await.unwrap_err().kind()
+        );
+    }
+
+    #[tokio::test]
+    async fn test_buffer_resize() {
+        let mock = Builder::new()
+            .read(&hex!("0100"))
+            .read(&hex!("0000 0000 0000"))
+            .build();
+        let mut reader = NixReader::builder().set_reserved_buf_size(8).build(mock);
+        // buffer has no capacity initially
+        assert_eq!(0, reader.buffer_mut().capacity());
+
+        assert_eq!(2, reader.force_fill().await.unwrap());
+
+        // After first read buffer should have capacity we chose
+        assert_eq!(8, reader.buffer_mut().capacity());
+
+        // Because there were only 6 bytes of capacity remaining in the
+        // buffer (enough for the last 6 bytes of input, but less than the
+        // required 8 bytes of reserve), the capacity was doubled.
+        assert_eq!(6, reader.force_fill().await.unwrap());
+        assert_eq!(16, reader.buffer_mut().capacity());
+
+        assert_eq!(1, reader.read_number().await.unwrap());
+    }
+}
diff --git a/tvix/nix-compat/src/nix_daemon/mod.rs b/tvix/nix-compat/src/nix_daemon/mod.rs
index fe652377d1b4..11413e85fd1b 100644
--- a/tvix/nix-compat/src/nix_daemon/mod.rs
+++ b/tvix/nix-compat/src/nix_daemon/mod.rs
@@ -2,3 +2,5 @@ pub mod worker_protocol;
 
 mod protocol_version;
 pub use protocol_version::ProtocolVersion;
+
+pub mod de;
diff --git a/tvix/nix-compat/src/nix_daemon/protocol_version.rs b/tvix/nix-compat/src/nix_daemon/protocol_version.rs
index 8fd2b085c962..19da28d484dd 100644
--- a/tvix/nix-compat/src/nix_daemon/protocol_version.rs
+++ b/tvix/nix-compat/src/nix_daemon/protocol_version.rs
@@ -1,3 +1,6 @@
+/// The latest version that is currently supported by nix-compat.
+static DEFAULT_PROTOCOL_VERSION: ProtocolVersion = ProtocolVersion::from_parts(1, 37);
+
 /// Protocol versions are represented as a u16.
 /// The upper 8 bits are the major version, the lower bits the minor.
 /// This is not aware of any endianness, use [crate::wire::read_u64] to get an
@@ -20,6 +23,12 @@ impl ProtocolVersion {
     }
 }
 
+impl Default for ProtocolVersion {
+    fn default() -> Self {
+        DEFAULT_PROTOCOL_VERSION
+    }
+}
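// Layout sketch: per the packing documented above, from_parts(1, 37)
// corresponds to the u16 0x0125 (major in the upper byte, minor 37 = 0x25 in
// the lower byte), so the two conversions below agree:
//
//     let v: ProtocolVersion = 0x0125u16.into();
//     assert_eq!(v, ProtocolVersion::from_parts(1, 37));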
+
 impl PartialOrd for ProtocolVersion {
     fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
         Some(self.cmp(other))
@@ -45,6 +54,13 @@ impl From<u16> for ProtocolVersion {
     }
 }
 
+#[cfg(any(test, feature = "test"))]
+impl From<(u8, u8)> for ProtocolVersion {
+    fn from((major, minor): (u8, u8)) -> Self {
+        Self::from_parts(major, minor)
+    }
+}
+
 impl TryFrom<u64> for ProtocolVersion {
     type Error = &'static str;
 
diff --git a/tvix/nix-compat/src/nix_http/mod.rs b/tvix/nix-compat/src/nix_http/mod.rs
new file mode 100644
index 000000000000..89ba147b8071
--- /dev/null
+++ b/tvix/nix-compat/src/nix_http/mod.rs
@@ -0,0 +1,115 @@
+use tracing::trace;
+
+use crate::nixbase32;
+
+/// The mime type used for NAR files, both compressed and uncompressed
+pub const MIME_TYPE_NAR: &str = "application/x-nix-nar";
+/// The mime type used for NARInfo files
+pub const MIME_TYPE_NARINFO: &str = "text/x-nix-narinfo";
+/// The mime type used for the `nix-cache-info` file
+pub const MIME_TYPE_CACHE_INFO: &str = "text/x-nix-cache-info";
+
+/// Parses a `14cx20k6z4hq508kqi2lm79qfld5f9mf7kiafpqsjs3zlmycza0k.nar`
+/// string and returns the nixbase32-decoded digest, as well as the compression
+/// suffix (which might be empty).
+pub fn parse_nar_str(s: &str) -> Option<([u8; 32], &str)> {
+    if !s.is_char_boundary(52) {
+        trace!("invalid string, no char boundary at 52");
+        return None;
+    }
+
+    let (hash_str, suffix) = s.split_at(52);
+
+    // we know hash_str is 52 bytes, so it's ok to unwrap here.
+    let hash_str_fixed: [u8; 52] = hash_str.as_bytes().try_into().unwrap();
+
+    match suffix.strip_prefix(".nar") {
+        Some(compression_suffix) => match nixbase32::decode_fixed(hash_str_fixed) {
+            Err(e) => {
+                trace!(err=%e, "invalid nixbase32 encoding");
+                None
+            }
+            Ok(digest) => Some((digest, compression_suffix)),
+        },
+        None => {
+            trace!("no .nar suffix");
+            None
+        }
+    }
+}
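// Background: the file name carries the nixbase32 encoding of a 32-byte
// (sha256) NAR digest, which is always 52 characters long, hence the fixed
// split at index 52.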
+
+/// Parses a `3mzh8lvgbynm9daj7c82k2sfsfhrsfsy.narinfo` string and returns the
+/// nixbase32-decoded digest.
+pub fn parse_narinfo_str(s: &str) -> Option<[u8; 20]> {
+    if !s.is_char_boundary(32) {
+        trace!("invalid string, no char boundary at 32");
+        return None;
+    }
+
+    match s.split_at(32) {
+        (hash_str, ".narinfo") => {
+            // we know this is 32 bytes, so it's ok to unwrap here.
+            let hash_str_fixed: [u8; 32] = hash_str.as_bytes().try_into().unwrap();
+
+            match nixbase32::decode_fixed(hash_str_fixed) {
+                Err(e) => {
+                    trace!(err=%e, "invalid nixbase32 encoding");
+                    None
+                }
+                Ok(digest) => Some(digest),
+            }
+        }
+        _ => {
+            trace!("invalid string, no .narinfo suffix");
+            None
+        }
+    }
+}
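// Background: narinfo file names carry the nixbase32 encoding of a 20-byte
// store path digest, which is always 32 characters long (cf.
// decode_len(32) == 20 in the nixbase32 tests).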
+
+#[cfg(test)]
+mod test {
+    use super::{parse_nar_str, parse_narinfo_str};
+    use hex_literal::hex;
+
+    #[test]
+    fn parse_nar_str_success() {
+        assert_eq!(
+            (
+                hex!("13a8cf7ca57f68a9f1752acee36a72a55187d3a954443c112818926f26109d91"),
+                ""
+            ),
+            parse_nar_str("14cx20k6z4hq508kqi2lm79qfld5f9mf7kiafpqsjs3zlmycza0k.nar").unwrap()
+        );
+
+        assert_eq!(
+            (
+                hex!("13a8cf7ca57f68a9f1752acee36a72a55187d3a954443c112818926f26109d91"),
+                ".xz"
+            ),
+            parse_nar_str("14cx20k6z4hq508kqi2lm79qfld5f9mf7kiafpqsjs3zlmycza0k.nar.xz").unwrap()
+        )
+    }
+
+    #[test]
+    fn parse_nar_str_failure() {
+        assert!(parse_nar_str("14cx20k6z4hq508kqi2lm79qfld5f9mf7kiafpqsjs3zlmycza0").is_none());
+        assert!(
+            parse_nar_str("14cx20k6z4hq508kqi2lm79qfld5f9mf7kiafpqsjs3zlmycza0🦊.nar").is_none()
+        )
+    }
+
+    #[test]
+    fn parse_narinfo_str_success() {
+        assert_eq!(
+            hex!("8a12321522fd91efbd60ebb2481af88580f61600"),
+            parse_narinfo_str("00bgd045z0d4icpbc2yyz4gx48ak44la.narinfo").unwrap()
+        );
+    }
+
+    #[test]
+    fn parse_narinfo_str_failure() {
+        assert!(parse_narinfo_str("00bgd045z0d4icpbc2yyz4gx48ak44la").is_none());
+        assert!(parse_narinfo_str("/00bgd045z0d4icpbc2yyz4gx48ak44la").is_none());
+        assert!(parse_narinfo_str("000000").is_none());
+        assert!(parse_narinfo_str("00bgd045z0d4icpbc2yyz4gx48ak44l๐ŸฆŠ.narinfo").is_none());
+    }
+}
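
A minimal usage sketch for the two new parsers, assuming the crate is consumed as nix_compat (the content_type_for helper is illustrative, not part of this change):

    use nix_compat::nix_http::{parse_nar_str, parse_narinfo_str, MIME_TYPE_NAR, MIME_TYPE_NARINFO};

    // Map the last component of a binary-cache request path to a content type.
    fn content_type_for(component: &str) -> Option<&'static str> {
        if parse_nar_str(component).is_some() {
            Some(MIME_TYPE_NAR)
        } else if parse_narinfo_str(component).is_some() {
            Some(MIME_TYPE_NARINFO)
        } else {
            None
        }
    }
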
diff --git a/tvix/nix-compat/src/nixbase32.rs b/tvix/nix-compat/src/nixbase32.rs
index b7ffc1dc2bcd..8d34e4cedce6 100644
--- a/tvix/nix-compat/src/nixbase32.rs
+++ b/tvix/nix-compat/src/nixbase32.rs
@@ -62,6 +62,12 @@ pub fn decode(input: impl AsRef<[u8]>) -> Result<Vec<u8>, DecodeError> {
     let input = input.as_ref();
 
     let output_len = decode_len(input.len());
+    if input.len() != encode_len(output_len) {
+        return Err(DecodeError {
+            position: input.len().min(encode_len(output_len)),
+            kind: DecodeKind::Length,
+        });
+    }
     let mut output: Vec<u8> = vec![0x00; output_len];
 
     decode_inner(input, &mut output)?;
@@ -163,6 +169,10 @@ mod tests {
     #[case::invalid_encoding_1("zz", None)]
     // this is an even more specific example - it'd decode as 00000000 11
     #[case::invalid_encoding_2("c0", None)]
+    // This has an invalid length
+    #[case::invalid_encoding_3("0", None)]
+    // This has an invalid length
+    #[case::invalid_encoding_4("0zz", None)]
     #[test]
     fn decode(#[case] enc: &str, #[case] dec: Option<&[u8]>) {
         match dec {
@@ -201,6 +211,11 @@ mod tests {
     #[test]
     fn decode_len() {
         assert_eq!(super::decode_len(0), 0);
+        assert_eq!(super::decode_len(1), 0);
+        assert_eq!(super::decode_len(2), 1);
+        assert_eq!(super::decode_len(3), 1);
+        assert_eq!(super::decode_len(4), 2);
+        assert_eq!(super::decode_len(5), 3);
         assert_eq!(super::decode_len(32), 20);
     }
 }
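
The added check rejects input lengths that no encoder output can have, before any per-character validation runs; a small sketch of the resulting behaviour:

    use nix_compat::nixbase32;

    // decode_len(1) == 0, but encode_len(0) == 0 != 1: rejected up front.
    assert!(nixbase32::decode(b"0").is_err());
    // decode_len(3) == 1, but encode_len(1) == 2 != 3: likewise rejected.
    assert!(nixbase32::decode(b"0zz").is_err());
    // 32 characters encode exactly 20 bytes, so valid digests still decode.
    assert_eq!(20, nixbase32::decode(b"00bgd045z0d4icpbc2yyz4gx48ak44la").unwrap().len());
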
diff --git a/tvix/nix-compat/src/nixcpp/conf.rs b/tvix/nix-compat/src/nixcpp/conf.rs
new file mode 100644
index 000000000000..68308115f988
--- /dev/null
+++ b/tvix/nix-compat/src/nixcpp/conf.rs
@@ -0,0 +1,202 @@
+use std::{fmt::Display, str::FromStr};
+
+/// Represents configuration as stored in /etc/nix/nix.conf.
+/// This list is not exhaustive; feel free to add more.
+#[derive(Clone, Debug, Default, Eq, PartialEq)]
+pub struct NixConfig<'a> {
+    pub allowed_users: Option<Vec<&'a str>>,
+    pub auto_optimise_store: Option<bool>,
+    pub cores: Option<u64>,
+    pub max_jobs: Option<u64>,
+    pub require_sigs: Option<bool>,
+    pub sandbox: Option<SandboxSetting>,
+    pub sandbox_fallback: Option<bool>,
+    pub substituters: Option<Vec<&'a str>>,
+    pub system_features: Option<Vec<&'a str>>,
+    pub trusted_public_keys: Option<Vec<crate::narinfo::VerifyingKey>>,
+    pub trusted_substituters: Option<Vec<&'a str>>,
+    pub trusted_users: Option<Vec<&'a str>>,
+    pub extra_platforms: Option<Vec<&'a str>>,
+    pub extra_sandbox_paths: Option<Vec<&'a str>>,
+    pub experimental_features: Option<Vec<&'a str>>,
+    pub builders_use_substitutes: Option<bool>,
+}
+
+impl<'a> NixConfig<'a> {
+    /// Parses configuration from a file like `/etc/nix/nix.conf`, returning
+    /// a [NixConfig] with all values it contains.
+    /// It does not support parsing multiple config files or their merge
+    /// semantics, and does not understand `include` and `!include` statements.
+    pub fn parse(input: &'a str) -> Result<Self, Error> {
+        let mut out = Self::default();
+
+        for line in input.lines() {
+            // strip comments at the end of the line
+            let line = line.split_once('#').map_or(line, |(line, _comment)| line);
+
+            // skip comments and empty lines
+            if line.trim().is_empty() {
+                continue;
+            }
+
+            let (tag, val) = line
+                .split_once('=')
+                .ok_or_else(|| Error::InvalidLine(line.to_string()))?;
+
+            // trim whitespace
+            let tag = tag.trim();
+            let val = val.trim();
+
+            #[inline]
+            fn parse_val<'a>(this: &mut NixConfig<'a>, tag: &str, val: &'a str) -> Option<()> {
+                match tag {
+                    "allowed-users" => {
+                        this.allowed_users = Some(val.split_whitespace().collect());
+                    }
+                    "auto-optimise-store" => {
+                        this.auto_optimise_store = Some(val.parse::<bool>().ok()?);
+                    }
+                    "cores" => {
+                        this.cores = Some(val.parse().ok()?);
+                    }
+                    "max-jobs" => {
+                        this.max_jobs = Some(val.parse().ok()?);
+                    }
+                    "require-sigs" => {
+                        this.require_sigs = Some(val.parse().ok()?);
+                    }
+                    "sandbox" => this.sandbox = Some(val.parse().ok()?),
+                    "sandbox-fallback" => this.sandbox_fallback = Some(val.parse().ok()?),
+                    "substituters" => this.substituters = Some(val.split_whitespace().collect()),
+                    "system-features" => {
+                        this.system_features = Some(val.split_whitespace().collect())
+                    }
+                    "trusted-public-keys" => {
+                        this.trusted_public_keys = Some(
+                            val.split_whitespace()
+                                .map(crate::narinfo::VerifyingKey::parse)
+                                .collect::<Result<Vec<crate::narinfo::VerifyingKey>, _>>()
+                                .ok()?,
+                        )
+                    }
+                    "trusted-substituters" => {
+                        this.trusted_substituters = Some(val.split_whitespace().collect())
+                    }
+                    "trusted-users" => this.trusted_users = Some(val.split_whitespace().collect()),
+                    "extra-platforms" => {
+                        this.extra_platforms = Some(val.split_whitespace().collect())
+                    }
+                    "extra-sandbox-paths" => {
+                        this.extra_sandbox_paths = Some(val.split_whitespace().collect())
+                    }
+                    "experimental-features" => {
+                        this.experimental_features = Some(val.split_whitespace().collect())
+                    }
+                    "builders-use-substitutes" => {
+                        this.builders_use_substitutes = Some(val.parse().ok()?)
+                    }
+                    _ => return None,
+                }
+                Some(())
+            }
+
+            parse_val(&mut out, tag, val)
+                .ok_or_else(|| Error::InvalidValue(tag.to_string(), val.to_string()))?
+        }
+
+        Ok(out)
+    }
+}
+
+#[derive(thiserror::Error, Debug)]
+pub enum Error {
+    #[error("Invalid line: {0}")]
+    InvalidLine(String),
+    #[error("Unrecognized key: {0}")]
+    UnrecognizedKey(String),
+    #[error("Invalid value '{1}' for key '{0}'")]
+    InvalidValue(String, String),
+}
+
+/// Valid values for the Nix 'sandbox' setting
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub enum SandboxSetting {
+    True,
+    False,
+    Relaxed,
+}
+
+impl Display for SandboxSetting {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        match self {
+            SandboxSetting::True => write!(f, "true"),
+            SandboxSetting::False => write!(f, "false"),
+            SandboxSetting::Relaxed => write!(f, "relaxed"),
+        }
+    }
+}
+
+impl FromStr for SandboxSetting {
+    type Err = &'static str;
+
+    fn from_str(s: &str) -> Result<Self, Self::Err> {
+        Ok(match s {
+            "true" => Self::True,
+            "false" => Self::False,
+            "relaxed" => Self::Relaxed,
+            _ => return Err("invalid value"),
+        })
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use crate::{narinfo::VerifyingKey, nixcpp::conf::SandboxSetting};
+
+    use super::NixConfig;
+
+    #[test]
+    pub fn test_parse() {
+        let config = NixConfig::parse(include_str!("../../testdata/nix.conf")).expect("must parse");
+
+        assert_eq!(
+            NixConfig {
+                allowed_users: Some(vec!["*"]),
+                auto_optimise_store: Some(false),
+                cores: Some(0),
+                max_jobs: Some(8),
+                require_sigs: Some(true),
+                sandbox: Some(SandboxSetting::True),
+                sandbox_fallback: Some(false),
+                substituters: Some(vec!["https://nix-community.cachix.org", "https://cache.nixos.org/"]),
+                system_features: Some(vec!["nixos-test", "benchmark", "big-parallel", "kvm"]),
+                trusted_public_keys: Some(vec![
+                    VerifyingKey::parse("cache.nixos.org-1:6NCHdD59X431o0gWypbMrAURkbJ16ZPMQFGspcDShjY=")
+                        .expect("failed to parse pubkey"),
+                    VerifyingKey::parse("nix-community.cachix.org-1:mB9FSh9qf2dCimDSUo8Zy7bkq5CX+/rkCWyvRCYg3Fs=")
+                        .expect("failed to parse pubkey")
+                ]),
+                trusted_substituters: Some(vec![]),
+                trusted_users: Some(vec!["flokli"]),
+                extra_platforms: Some(vec!["aarch64-linux", "i686-linux"]),
+                extra_sandbox_paths: Some(vec![
+                    "/run/binfmt", "/nix/store/swwyxyqpazzvbwx8bv40z7ih144q841f-qemu-aarch64-binfmt-P-x86_64-unknown-linux-musl"
+                ]),
+                experimental_features: Some(vec!["nix-command"]),
+                builders_use_substitutes: Some(true)
+            },
+            config
+        );
+
+        // parse a config file using some non-space whitespaces, as well as comments right after the lines.
+        // ensure it contains the same data as initially parsed.
+        let other_config = NixConfig::parse(include_str!("../../testdata/other_nix.conf"))
+            .expect("other config must parse");
+
+        assert_eq!(config, other_config);
+    }
+}
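
A minimal sketch of the parser in use (the inline config literal is illustrative, not from the testdata):

    use nix_compat::nixcpp::conf::{NixConfig, SandboxSetting};

    let cfg = NixConfig::parse("max-jobs = 4 # inline comment\nsandbox = relaxed")
        .expect("must parse");
    assert_eq!(cfg.max_jobs, Some(4));
    assert_eq!(cfg.sandbox, Some(SandboxSetting::Relaxed));
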
diff --git a/tvix/nix-compat/src/nixcpp/mod.rs b/tvix/nix-compat/src/nixcpp/mod.rs
new file mode 100644
index 000000000000..57518de8cc52
--- /dev/null
+++ b/tvix/nix-compat/src/nixcpp/mod.rs
@@ -0,0 +1,9 @@
+//! Contains code for parsing some of the Nixcpp config files etc.
+//! that Nix leaves *on the local disk*.
+//!
+//! This is only for Nix's own state/config.
+//!
+//! More "standardized" protocols, like parts of the Nix HTTP Binary Cache
+//! protocol, live elsewhere.
+
+pub mod conf;
diff --git a/tvix/nix-compat/src/nixhash/ca_hash.rs b/tvix/nix-compat/src/nixhash/ca_hash.rs
index 2bf5f966cefe..e6cbaf5b710a 100644
--- a/tvix/nix-compat/src/nixhash/ca_hash.rs
+++ b/tvix/nix-compat/src/nixhash/ca_hash.rs
@@ -47,12 +47,33 @@ impl CAHash {
         }
     }
 
+    /// Returns a colon-separated string consisting of mode, recursiveness and
+    /// hash algo. Used as a prefix in various string representations.
+    pub fn algo_str(&self) -> &'static str {
+        match self.mode() {
+            HashMode::Flat => match self.hash().as_ref() {
+                NixHash::Md5(_) => "fixed:md5",
+                NixHash::Sha1(_) => "fixed:sha1",
+                NixHash::Sha256(_) => "fixed:sha256",
+                NixHash::Sha512(_) => "fixed:sha512",
+            },
+            HashMode::Nar => match self.hash().as_ref() {
+                NixHash::Md5(_) => "fixed:r:md5",
+                NixHash::Sha1(_) => "fixed:r:sha1",
+                NixHash::Sha256(_) => "fixed:r:sha256",
+                NixHash::Sha512(_) => "fixed:r:sha512",
+            },
+            HashMode::Text => "text:sha256",
+        }
+    }
+
     /// Constructs a [CAHash] from the textual representation,
     /// which is one of the three:
     /// - `text:sha256:$nixbase32sha256digest`
     /// - `fixed:r:$algo:$nixbase32digest`
     /// - `fixed:$algo:$nixbase32digest`
-    /// which is the format that's used in the NARInfo for example.
+    ///
+    /// These formats are used in NARInfo, for example.
     pub fn from_nix_hex_str(s: &str) -> Option<Self> {
         let (tag, s) = s.split_once(':')?;
 
@@ -76,13 +97,11 @@ impl CAHash {
     /// Formats a [CAHash] in the Nix default hash format, which is the format
     /// that's used in NARInfos for example.
     pub fn to_nix_nixbase32_string(&self) -> String {
-        match self {
-            CAHash::Flat(nh) => format!("fixed:{}", nh.to_nix_nixbase32_string()),
-            CAHash::Nar(nh) => format!("fixed:r:{}", nh.to_nix_nixbase32_string()),
-            CAHash::Text(digest) => {
-                format!("text:sha256:{}", nixbase32::encode(digest))
-            }
-        }
+        format!(
+            "{}:{}",
+            self.algo_str(),
+            nixbase32::encode(self.hash().digest_as_bytes())
+        )
     }
 
     /// This takes a serde_json::Map and turns it into this structure. This is necessary to do such
@@ -90,11 +109,13 @@ impl CAHash {
     /// know whether we have a invalid or a missing NixHashWithMode structure in another structure,
     /// e.g. Output.
     /// This means we have this combinatorial situation:
+    ///
     /// - no hash, no hashAlgo: no [CAHash] so we return Ok(None).
     /// - present hash, missing hashAlgo: invalid, we will return missing_field
     /// - missing hash, present hashAlgo: same
     /// - present hash, present hashAlgo: either we return ourselves or a type/value validation
-    /// error.
+    ///   error.
+    ///
     /// This function is for internal consumption regarding those needs until we have a better
     /// solution. Now this is said, let's explain how this works.
     ///
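
With algo_str in place, to_nix_nixbase32_string reduces to the prefix plus the encoded digest; a sketch (the digest bytes are made up):

    use nix_compat::nixhash::{CAHash, NixHash};

    let ca = CAHash::Nar(NixHash::Sha256([0xff; 32]));
    assert_eq!("fixed:r:sha256", ca.algo_str());
    // The full representation is algo_str(), a colon, then the nixbase32 digest.
    assert!(ca.to_nix_nixbase32_string().starts_with("fixed:r:sha256:"));
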
diff --git a/tvix/nix-compat/src/store_path/mod.rs b/tvix/nix-compat/src/store_path/mod.rs
index 707c41a92d74..177cc96ce20f 100644
--- a/tvix/nix-compat/src/store_path/mod.rs
+++ b/tvix/nix-compat/src/store_path/mod.rs
@@ -2,14 +2,15 @@ use crate::nixbase32;
 use data_encoding::{DecodeError, BASE64};
 use serde::{Deserialize, Serialize};
 use std::{
-    fmt,
-    path::PathBuf,
+    fmt::{self, Display},
+    ops::Deref,
+    path::Path,
     str::{self, FromStr},
 };
 use thiserror;
 
 #[cfg(target_family = "unix")]
-use std::os::unix::ffi::OsStringExt;
+use std::os::unix::ffi::OsStrExt;
 
 mod utils;
 
@@ -54,17 +55,26 @@ pub enum Error {
 /// A [StorePath] does not encode any additional subpath "inside" the store
 /// path.
 #[derive(Clone, Debug, PartialEq, Eq, Hash)]
-pub struct StorePath {
+pub struct StorePath<S>
+where
+    S: std::cmp::Eq + std::cmp::PartialEq,
+{
     digest: [u8; DIGEST_SIZE],
-    name: Box<str>,
+    name: S,
 }
+/// Like [StorePath], but without a heap allocation for the name.
+/// Used by [StorePath] for parsing.
+pub type StorePathRef<'a> = StorePath<&'a str>;
 
-impl StorePath {
+impl<S> StorePath<S>
+where
+    S: std::cmp::Eq + Deref<Target = str>,
+{
     pub fn digest(&self) -> &[u8; DIGEST_SIZE] {
         &self.digest
     }
 
-    pub fn name(&self) -> &str {
+    pub fn name(&self) -> &S {
         &self.name
     }
 
@@ -74,60 +84,101 @@ impl StorePath {
             name: &self.name,
         }
     }
-}
 
-impl PartialOrd for StorePath {
-    fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
-        Some(self.cmp(other))
+    pub fn to_owned(&self) -> StorePath<String> {
+        StorePath {
+            digest: self.digest,
+            name: self.name.to_string(),
+        }
     }
-}
 
-/// `StorePath`s are sorted by their reverse digest to match the sorting order
-/// of the nixbase32-encoded string.
-impl Ord for StorePath {
-    fn cmp(&self, other: &Self) -> std::cmp::Ordering {
-        self.as_ref().cmp(&other.as_ref())
+    /// Construct a [StorePath] by passing the `$digest-$name` string
+    /// that comes after [STORE_DIR_WITH_SLASH].
+    pub fn from_bytes<'a>(s: &'a [u8]) -> Result<Self, Error>
+    where
+        S: From<&'a str>,
+    {
+        // the whole string needs to be at least:
+        //
+        // - 32 characters (encoded hash)
+        // - 1 dash
+        // - 1 character for the name
+        if s.len() < ENCODED_DIGEST_SIZE + 2 {
+            Err(Error::InvalidLength)?
+        }
+
+        let digest = nixbase32::decode_fixed(&s[..ENCODED_DIGEST_SIZE])?;
+
+        if s[ENCODED_DIGEST_SIZE] != b'-' {
+            return Err(Error::MissingDash);
+        }
+
+        Ok(StorePath {
+            digest,
+            name: validate_name(&s[ENCODED_DIGEST_SIZE + 1..])?.into(),
+        })
     }
-}
 
-impl FromStr for StorePath {
-    type Err = Error;
+    /// Construct a [StorePath] from a name and digest.
+    /// The name is validated, and the digest checked for size.
+    pub fn from_name_and_digest<'a>(name: &'a str, digest: &[u8]) -> Result<Self, Error>
+    where
+        S: From<&'a str>,
+    {
+        let digest_fixed = digest.try_into().map_err(|_| Error::InvalidLength)?;
+        Self::from_name_and_digest_fixed(name, digest_fixed)
+    }
 
-    /// Construct a [StorePath] by passing the `$digest-$name` string
-    /// that comes after [STORE_DIR_WITH_SLASH].
-    fn from_str(s: &str) -> Result<Self, Self::Err> {
-        Self::from_bytes(s.as_bytes())
+    /// Construct a [StorePath] from a name and digest of correct length.
+    /// The name is validated.
+    pub fn from_name_and_digest_fixed<'a>(
+        name: &'a str,
+        digest: [u8; DIGEST_SIZE],
+    ) -> Result<Self, Error>
+    where
+        S: From<&'a str>,
+    {
+        Ok(Self {
+            name: validate_name(name.as_bytes())?.into(),
+            digest,
+        })
     }
-}
 
-impl StorePath {
-    /// Construct a [StorePath] by passing the `$digest-$name` string
-    /// that comes after [STORE_DIR_WITH_SLASH].
-    pub fn from_bytes(s: &[u8]) -> Result<StorePath, Error> {
-        Ok(StorePathRef::from_bytes(s)?.to_owned())
+    /// Construct a [StorePath] from an absolute store path string.
+    /// This is equivalent to calling [StorePath::from_bytes], but first
+    /// stripping the [STORE_DIR_WITH_SLASH] prefix.
+    pub fn from_absolute_path<'a>(s: &'a [u8]) -> Result<Self, Error>
+    where
+        S: From<&'a str>,
+    {
+        match s.strip_prefix(STORE_DIR_WITH_SLASH.as_bytes()) {
+            Some(s_stripped) => Self::from_bytes(s_stripped),
+            None => Err(Error::MissingStoreDir),
+        }
     }
 
-    /// Decompose a string into a [StorePath] and a [PathBuf] containing the
+    /// Decompose a string into a [StorePath] and a [Path] containing the
     /// rest of the path, or an error.
     #[cfg(target_family = "unix")]
-    pub fn from_absolute_path_full(s: &str) -> Result<(StorePath, PathBuf), Error> {
+    pub fn from_absolute_path_full<'a>(s: &'a str) -> Result<(Self, &'a Path), Error>
+    where
+        S: From<&'a str>,
+    {
         // strip [STORE_DIR_WITH_SLASH] from s
+
         match s.strip_prefix(STORE_DIR_WITH_SLASH) {
             None => Err(Error::MissingStoreDir),
             Some(rest) => {
-                // put rest in a PathBuf
-                let mut p = PathBuf::new();
-                p.push(rest);
-
-                let mut it = p.components();
+                let mut it = Path::new(rest).components();
 
                 // The first component of the rest must be parse-able as a [StorePath]
                 if let Some(first_component) = it.next() {
                     // convert first component to StorePath
-                    let first_component_bytes = first_component.as_os_str().to_owned().into_vec();
-                    let store_path = StorePath::from_bytes(&first_component_bytes)?;
+                    let store_path = StorePath::from_bytes(first_component.as_os_str().as_bytes())?;
+
                     // collect rest
-                    let rest_buf: PathBuf = it.collect();
+                    let rest_buf = it.as_path();
+
                     Ok((store_path, rest_buf))
                 } else {
                     Err(Error::InvalidLength) // Well, or missing "/"?
@@ -139,139 +190,48 @@ impl StorePath {
     /// Returns an absolute store path string.
     /// That is just the string representation, prefixed with the store prefix
     /// ([STORE_DIR_WITH_SLASH]),
-    pub fn to_absolute_path(&self) -> String {
-        let sp_ref: StorePathRef = self.into();
-        sp_ref.to_absolute_path()
-    }
-}
-
-impl<'de> Deserialize<'de> for StorePath {
-    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
-    where
-        D: serde::Deserializer<'de>,
-    {
-        let r = <StorePathRef<'de> as Deserialize<'de>>::deserialize(deserializer)?;
-        Ok(r.to_owned())
-    }
-}
-
-impl Serialize for StorePath {
-    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+    pub fn to_absolute_path(&self) -> String
     where
-        S: serde::Serializer,
+        S: Display,
     {
-        let r: StorePathRef = self.into();
-        r.serialize(serializer)
-    }
-}
-
-/// Like [StorePath], but without a heap allocation for the name.
-/// Used by [StorePath] for parsing.
-///
-#[derive(Debug, Eq, PartialEq, Clone, Copy, Hash)]
-pub struct StorePathRef<'a> {
-    digest: [u8; DIGEST_SIZE],
-    name: &'a str,
-}
-
-impl<'a> From<&'a StorePath> for StorePathRef<'a> {
-    fn from(&StorePath { digest, ref name }: &'a StorePath) -> Self {
-        StorePathRef { digest, name }
+        format!("{}{}", STORE_DIR_WITH_SLASH, self)
     }
 }
 
-impl<'a> PartialOrd for StorePathRef<'a> {
+impl<S> PartialOrd for StorePath<S>
+where
+    S: std::cmp::PartialEq + std::cmp::Eq,
+{
     fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
         Some(self.cmp(other))
     }
 }
 
-/// `StorePathRef`s are sorted by their reverse digest to match the sorting order
+/// `StorePath`s are sorted by their reverse digest to match the sorting order
 /// of the nixbase32-encoded string.
-impl<'a> Ord for StorePathRef<'a> {
+impl<S> Ord for StorePath<S>
+where
+    S: std::cmp::PartialEq + std::cmp::Eq,
+{
     fn cmp(&self, other: &Self) -> std::cmp::Ordering {
         self.digest.iter().rev().cmp(other.digest.iter().rev())
     }
 }
 
-impl<'a> StorePathRef<'a> {
-    pub fn digest(&self) -> &[u8; DIGEST_SIZE] {
-        &self.digest
-    }
-
-    pub fn name(&self) -> &'a str {
-        self.name
-    }
-
-    pub fn to_owned(&self) -> StorePath {
-        StorePath {
-            digest: self.digest,
-            name: self.name.into(),
-        }
-    }
-
-    /// Construct a [StorePathRef] from a name and digest.
-    /// The name is validated, and the digest checked for size.
-    pub fn from_name_and_digest(name: &'a str, digest: &[u8]) -> Result<Self, Error> {
-        let digest_fixed = digest.try_into().map_err(|_| Error::InvalidLength)?;
-        Self::from_name_and_digest_fixed(name, digest_fixed)
-    }
-
-    /// Construct a [StorePathRef] from a name and digest of correct length.
-    /// The name is validated.
-    pub fn from_name_and_digest_fixed(
-        name: &'a str,
-        digest: [u8; DIGEST_SIZE],
-    ) -> Result<Self, Error> {
-        Ok(Self {
-            name: validate_name(name.as_bytes())?,
-            digest,
-        })
-    }
-
-    /// Construct a [StorePathRef] from an absolute store path string.
-    /// This is equivalent to calling [StorePathRef::from_bytes], but stripping
-    /// the [STORE_DIR_WITH_SLASH] prefix before.
-    pub fn from_absolute_path(s: &'a [u8]) -> Result<Self, Error> {
-        match s.strip_prefix(STORE_DIR_WITH_SLASH.as_bytes()) {
-            Some(s_stripped) => Self::from_bytes(s_stripped),
-            None => Err(Error::MissingStoreDir),
-        }
-    }
+impl FromStr for StorePath<String> {
+    type Err = Error;
 
-    /// Construct a [StorePathRef] by passing the `$digest-$name` string
+    /// Construct a [StorePath] by passing the `$digest-$name` string
     /// that comes after [STORE_DIR_WITH_SLASH].
-    pub fn from_bytes(s: &'a [u8]) -> Result<Self, Error> {
-        // the whole string needs to be at least:
-        //
-        // - 32 characters (encoded hash)
-        // - 1 dash
-        // - 1 character for the name
-        if s.len() < ENCODED_DIGEST_SIZE + 2 {
-            Err(Error::InvalidLength)?
-        }
-
-        let digest = nixbase32::decode_fixed(&s[..ENCODED_DIGEST_SIZE])?;
-
-        if s[ENCODED_DIGEST_SIZE] != b'-' {
-            return Err(Error::MissingDash);
-        }
-
-        Ok(StorePathRef {
-            digest,
-            name: validate_name(&s[ENCODED_DIGEST_SIZE + 1..])?,
-        })
-    }
-
-    /// Returns an absolute store path string.
-    /// That is just the string representation, prefixed with the store prefix
-    /// ([STORE_DIR_WITH_SLASH]),
-    pub fn to_absolute_path(&self) -> String {
-        format!("{}{}", STORE_DIR_WITH_SLASH, self)
+    fn from_str(s: &str) -> Result<Self, Self::Err> {
+        StorePath::<String>::from_bytes(s.as_bytes())
     }
 }
 
-impl<'de: 'a, 'a> Deserialize<'de> for StorePathRef<'a> {
+impl<'a, 'de: 'a, S> Deserialize<'de> for StorePath<S>
+where
+    S: std::cmp::Eq + Deref<Target = str> + From<&'a str>,
+{
     fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
     where
         D: serde::Deserializer<'de>,
@@ -284,16 +244,19 @@ impl<'de: 'a, 'a> Deserialize<'de> for StorePathRef<'a> {
                 &"store path prefix",
             )
         })?;
-        StorePathRef::from_bytes(stripped.as_bytes()).map_err(|_| {
+        StorePath::from_bytes(stripped.as_bytes()).map_err(|_| {
             serde::de::Error::invalid_value(serde::de::Unexpected::Str(string), &"StorePath")
         })
     }
 }
 
-impl Serialize for StorePathRef<'_> {
-    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+impl<S> Serialize for StorePath<S>
+where
+    S: std::cmp::Eq + Deref<Target = str> + Display,
+{
+    fn serialize<SR>(&self, serializer: SR) -> Result<SR::Ok, SR::Error>
     where
-        S: serde::Serializer,
+        SR: serde::Serializer,
     {
         let string: String = self.to_absolute_path();
         string.serialize(serializer)
@@ -347,13 +310,10 @@ pub(crate) fn validate_name(s: &(impl AsRef<[u8]> + ?Sized)) -> Result<&str, Err
     Ok(unsafe { str::from_utf8_unchecked(s) })
 }
 
-impl fmt::Display for StorePath {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        StorePathRef::from(self).fmt(f)
-    }
-}
-
-impl fmt::Display for StorePathRef<'_> {
+impl<S> fmt::Display for StorePath<S>
+where
+    S: fmt::Display + std::cmp::Eq,
+{
     /// The string representation of a store path starts with a digest (20
     /// bytes), [crate::nixbase32]-encoded, followed by a `-`,
     /// and ends with the name.
@@ -386,12 +346,12 @@ mod tests {
     fn happy_path() {
         let example_nix_path_str =
             "00bgd045z0d4icpbc2yyz4gx48ak44la-net-tools-1.60_p20170221182432";
-        let nixpath = StorePath::from_bytes(example_nix_path_str.as_bytes())
+        let nixpath = StorePathRef::from_bytes(example_nix_path_str.as_bytes())
             .expect("Error parsing example string");
 
         let expected_digest: [u8; DIGEST_SIZE] = hex!("8a12321522fd91efbd60ebb2481af88580f61600");
 
-        assert_eq!("net-tools-1.60_p20170221182432", nixpath.name());
+        assert_eq!("net-tools-1.60_p20170221182432", *nixpath.name());
         assert_eq!(nixpath.digest, expected_digest);
 
         assert_eq!(example_nix_path_str, nixpath.to_string())
@@ -426,8 +386,8 @@ mod tests {
             if w.len() < 2 {
                 continue;
             }
-            let (pa, _) = StorePath::from_absolute_path_full(w[0]).expect("parseable");
-            let (pb, _) = StorePath::from_absolute_path_full(w[1]).expect("parseable");
+            let (pa, _) = StorePathRef::from_absolute_path_full(w[0]).expect("parseable");
+            let (pb, _) = StorePathRef::from_absolute_path_full(w[1]).expect("parseable");
             assert_eq!(
                 Ordering::Less,
                 pa.cmp(&pb),
@@ -448,36 +408,38 @@ mod tests {
     /// https://github.com/NixOS/nix/pull/9867 (revert-of-revert)
     #[test]
     fn starts_with_dot() {
-        StorePath::from_bytes(b"fli4bwscgna7lpm7v5xgnjxrxh0yc7ra-.gitignore")
+        StorePathRef::from_bytes(b"fli4bwscgna7lpm7v5xgnjxrxh0yc7ra-.gitignore")
             .expect("must succeed");
     }
 
     #[test]
     fn empty_name() {
-        StorePath::from_bytes(b"00bgd045z0d4icpbc2yy-").expect_err("must fail");
+        StorePathRef::from_bytes(b"00bgd045z0d4icpbc2yy-").expect_err("must fail");
     }
 
     #[test]
     fn excessive_length() {
-        StorePath::from_bytes(b"00bgd045z0d4icpbc2yy-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa")
+        StorePathRef::from_bytes(b"00bgd045z0d4icpbc2yy-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa")
             .expect_err("must fail");
     }
 
     #[test]
     fn invalid_hash_length() {
-        StorePath::from_bytes(b"00bgd045z0d4icpbc2yy-net-tools-1.60_p20170221182432")
+        StorePathRef::from_bytes(b"00bgd045z0d4icpbc2yy-net-tools-1.60_p20170221182432")
             .expect_err("must fail");
     }
 
     #[test]
     fn invalid_encoding_hash() {
-        StorePath::from_bytes(b"00bgd045z0d4icpbc2yyz4gx48aku4la-net-tools-1.60_p20170221182432")
-            .expect_err("must fail");
+        StorePathRef::from_bytes(
+            b"00bgd045z0d4icpbc2yyz4gx48aku4la-net-tools-1.60_p20170221182432",
+        )
+        .expect_err("must fail");
     }
 
     #[test]
     fn more_than_just_the_bare_nix_store_path() {
-        StorePath::from_bytes(
+        StorePathRef::from_bytes(
             b"00bgd045z0d4icpbc2yyz4gx48aku4la-net-tools-1.60_p20170221182432/bin/arp",
         )
         .expect_err("must fail");
@@ -485,7 +447,7 @@ mod tests {
 
     #[test]
     fn no_dash_between_hash_and_name() {
-        StorePath::from_bytes(b"00bgd045z0d4icpbc2yyz4gx48ak44lanet-tools-1.60_p20170221182432")
+        StorePathRef::from_bytes(b"00bgd045z0d4icpbc2yyz4gx48ak44lanet-tools-1.60_p20170221182432")
             .expect_err("must fail");
     }
 
@@ -534,7 +496,7 @@ mod tests {
 
     #[test]
     fn serialize_owned() {
-        let nixpath_actual = StorePath::from_bytes(
+        let nixpath_actual = StorePathRef::from_bytes(
             b"00bgd045z0d4icpbc2yyz4gx48ak44la-net-tools-1.60_p20170221182432",
         )
         .expect("can parse");
@@ -578,7 +540,8 @@ mod tests {
         let store_path_str_json =
             "\"/nix/store/00bgd045z0d4icpbc2yyz4gx48ak44la-net-tools-1.60_p20170221182432\"";
 
-        let store_path: StorePath = serde_json::from_str(store_path_str_json).expect("valid json");
+        let store_path: StorePath<String> =
+            serde_json::from_str(store_path_str_json).expect("valid json");
 
         assert_eq!(
             "/nix/store/00bgd045z0d4icpbc2yyz4gx48ak44la-net-tools-1.60_p20170221182432",
@@ -601,7 +564,7 @@ mod tests {
         StorePath::from_bytes(b"00bgd045z0d4icpbc2yyz4gx48ak44la-net-tools-1.60_p20170221182432").unwrap(), PathBuf::from("bin/arp/"))]
     fn from_absolute_path_full(
         #[case] s: &str,
-        #[case] exp_store_path: StorePath,
+        #[case] exp_store_path: StorePath<&str>,
         #[case] exp_path: PathBuf,
     ) {
         let (actual_store_path, actual_path) =
@@ -615,15 +578,15 @@ mod tests {
     fn from_absolute_path_errors() {
         assert_eq!(
             Error::InvalidLength,
-            StorePath::from_absolute_path_full("/nix/store/").expect_err("must fail")
+            StorePathRef::from_absolute_path_full("/nix/store/").expect_err("must fail")
         );
         assert_eq!(
             Error::InvalidLength,
-            StorePath::from_absolute_path_full("/nix/store/foo").expect_err("must fail")
+            StorePathRef::from_absolute_path_full("/nix/store/foo").expect_err("must fail")
         );
         assert_eq!(
             Error::MissingStoreDir,
-            StorePath::from_absolute_path_full(
+            StorePathRef::from_absolute_path_full(
                 "00bgd045z0d4icpbc2yyz4gx48ak44la-net-tools-1.60_p20170221182432"
             )
             .expect_err("must fail")
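
The generic StorePath<S> lets call sites pick between borrowing and owning the name; a sketch:

    use nix_compat::store_path::{StorePath, StorePathRef};
    use std::path::Path;

    let s = "/nix/store/00bgd045z0d4icpbc2yyz4gx48ak44la-net-tools-1.60_p20170221182432/bin/arp";

    // Borrowing parse: the name points into `s`, the rest is a borrowed Path.
    let (sp, rest) = StorePathRef::from_absolute_path_full(s).expect("must parse");
    assert_eq!(Path::new("bin/arp"), rest);

    // Promote to an owned StorePath<String> only when it must outlive `s`.
    let owned: StorePath<String> = sp.to_owned();
    assert_eq!(sp.to_absolute_path(), owned.to_absolute_path());
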
diff --git a/tvix/nix-compat/src/store_path/utils.rs b/tvix/nix-compat/src/store_path/utils.rs
index d6f390db85c2..4bfbb72fcdde 100644
--- a/tvix/nix-compat/src/store_path/utils.rs
+++ b/tvix/nix-compat/src/store_path/utils.rs
@@ -1,6 +1,6 @@
 use crate::nixbase32;
 use crate::nixhash::{CAHash, NixHash};
-use crate::store_path::{Error, StorePathRef, STORE_DIR};
+use crate::store_path::{Error, StorePath, StorePathRef, STORE_DIR};
 use data_encoding::HEXLOWER;
 use sha2::{Digest, Sha256};
 use thiserror;
@@ -43,11 +43,17 @@ pub fn compress_hash<const OUTPUT_SIZE: usize>(input: &[u8]) -> [u8; OUTPUT_SIZE
 /// derivation or a literal text file that may contain references.
 /// If you don't want to have to pass the entire contents, you might want to use
 /// [build_ca_path] instead.
-pub fn build_text_path<S: AsRef<str>, I: IntoIterator<Item = S>, C: AsRef<[u8]>>(
-    name: &str,
+pub fn build_text_path<'a, S, SP, I, C>(
+    name: &'a str,
     content: C,
     references: I,
-) -> Result<StorePathRef<'_>, BuildStorePathError> {
+) -> Result<StorePath<SP>, BuildStorePathError>
+where
+    S: AsRef<str>,
+    SP: std::cmp::Eq + std::ops::Deref<Target = str> + std::convert::From<&'a str>,
+    I: IntoIterator<Item = S>,
+    C: AsRef<[u8]>,
+{
     // produce the sha256 digest of the contents
     let content_digest = Sha256::new_with_prefix(content).finalize().into();
 
@@ -55,12 +61,17 @@ pub fn build_text_path<S: AsRef<str>, I: IntoIterator<Item = S>, C: AsRef<[u8]>>
 }
 
 /// This builds a store path from a [CAHash] and a list of references.
-pub fn build_ca_path<'a, S: AsRef<str>, I: IntoIterator<Item = S>>(
+pub fn build_ca_path<'a, S, SP, I>(
     name: &'a str,
     ca_hash: &CAHash,
     references: I,
     self_reference: bool,
-) -> Result<StorePathRef<'a>, BuildStorePathError> {
+) -> Result<StorePath<SP>, BuildStorePathError>
+where
+    S: AsRef<str>,
+    SP: std::cmp::Eq + std::ops::Deref<Target = str> + std::convert::From<&'a str>,
+    I: IntoIterator<Item = S>,
+{
     // self references are only allowed for CAHash::Nar(NixHash::Sha256(_)).
     if self_reference && matches!(ca_hash, CAHash::Nar(NixHash::Sha256(_))) {
         return Err(BuildStorePathError::InvalidReference());
@@ -145,17 +156,20 @@ pub fn build_output_path<'a>(
 /// bytes.
 /// Inside a StorePath, that digest is printed nixbase32-encoded
 /// (32 characters).
-fn build_store_path_from_fingerprint_parts<'a>(
+fn build_store_path_from_fingerprint_parts<'a, S>(
     ty: &str,
     inner_digest: &[u8; 32],
     name: &'a str,
-) -> Result<StorePathRef<'a>, Error> {
+) -> Result<StorePath<S>, Error>
+where
+    S: std::cmp::Eq + std::ops::Deref<Target = str> + std::convert::From<&'a str>,
+{
     let fingerprint = format!(
         "{ty}:sha256:{}:{STORE_DIR}:{name}",
         HEXLOWER.encode(inner_digest)
     );
     // name validation happens in here.
-    StorePathRef::from_name_and_digest_fixed(
+    StorePath::from_name_and_digest_fixed(
         name,
         compress_hash(&Sha256::new_with_prefix(fingerprint).finalize()),
     )
@@ -216,7 +230,7 @@ mod test {
         // nix-repl> builtins.toFile "foo" "bar"
         // "/nix/store/vxjiwkjkn7x4079qvh1jkl5pn05j2aw0-foo"
 
-        let store_path = build_text_path("foo", "bar", Vec::<String>::new())
+        let store_path: StorePathRef = build_text_path("foo", "bar", Vec::<String>::new())
             .expect("build_store_path() should succeed");
 
         assert_eq!(
@@ -232,11 +246,11 @@ mod test {
         // nix-repl> builtins.toFile "baz" "${builtins.toFile "foo" "bar"}"
         // "/nix/store/5xd714cbfnkz02h2vbsj4fm03x3f15nf-baz"
 
-        let inner = build_text_path("foo", "bar", Vec::<String>::new())
+        let inner: StorePathRef = build_text_path("foo", "bar", Vec::<String>::new())
             .expect("path_with_references() should succeed");
         let inner_path = inner.to_absolute_path();
 
-        let outer = build_text_path("baz", &inner_path, vec![inner_path.as_str()])
+        let outer: StorePathRef = build_text_path("baz", &inner_path, vec![inner_path.as_str()])
             .expect("path_with_references() should succeed");
 
         assert_eq!(
@@ -247,7 +261,7 @@ mod test {
 
     #[test]
     fn build_sha1_path() {
-        let outer = build_ca_path(
+        let outer: StorePathRef = build_ca_path(
             "bar",
             &CAHash::Nar(NixHash::Sha1(hex!(
                 "0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33"
@@ -272,7 +286,7 @@ mod test {
         //
         // $ nix store make-content-addressed /nix/store/5xd714cbfnkz02h2vbsj4fm03x3f15nf-baz
         // rewrote '/nix/store/5xd714cbfnkz02h2vbsj4fm03x3f15nf-baz' to '/nix/store/s89y431zzhmdn3k8r96rvakryddkpv2v-baz'
-        let outer = build_ca_path(
+        let outer: StorePathRef = build_ca_path(
             "baz",
             &CAHash::Nar(NixHash::Sha256(
                 nixbase32::decode(b"1xqkzcb3909fp07qngljr4wcdnrh1gdam1m2n29i6hhrxlmkgkv1")
diff --git a/tvix/nix-compat/src/wire/bytes/mod.rs b/tvix/nix-compat/src/wire/bytes/mod.rs
index 2ed071e37985..74adfb49b6a4 100644
--- a/tvix/nix-compat/src/wire/bytes/mod.rs
+++ b/tvix/nix-compat/src/wire/bytes/mod.rs
@@ -1,9 +1,12 @@
+#[cfg(feature = "async")]
+use std::mem::MaybeUninit;
 use std::{
     io::{Error, ErrorKind},
-    mem::MaybeUninit,
     ops::RangeInclusive,
 };
-use tokio::io::{self, AsyncReadExt, AsyncWriteExt, ReadBuf};
+#[cfg(feature = "async")]
+use tokio::io::ReadBuf;
+use tokio::io::{self, AsyncReadExt, AsyncWriteExt};
 
 pub(crate) mod reader;
 pub use reader::BytesReader;
@@ -11,7 +14,7 @@ mod writer;
 pub use writer::BytesWriter;
 
 /// 8 null bytes, used to write out padding.
-const EMPTY_BYTES: &[u8; 8] = &[0u8; 8];
+pub(crate) const EMPTY_BYTES: &[u8; 8] = &[0u8; 8];
 
 /// The length of the size field, in bytes is always 8.
 const LEN_SIZE: usize = 8;
@@ -33,12 +36,9 @@ const LEN_SIZE: usize = 8;
 ///
 /// This buffers the entire payload into memory,
 /// a streaming version is available at [crate::wire::bytes::BytesReader].
-pub async fn read_bytes<R: ?Sized>(
-    r: &mut R,
-    allowed_size: RangeInclusive<usize>,
-) -> io::Result<Vec<u8>>
+pub async fn read_bytes<R>(r: &mut R, allowed_size: RangeInclusive<usize>) -> io::Result<Vec<u8>>
 where
-    R: AsyncReadExt + Unpin,
+    R: AsyncReadExt + Unpin + ?Sized,
 {
     // read the length field
     let len = r.read_u64_le().await?;
@@ -82,13 +82,14 @@ where
     Ok(buf)
 }
 
-pub(crate) async fn read_bytes_buf<'a, const N: usize, R: ?Sized>(
+#[cfg(feature = "async")]
+pub(crate) async fn read_bytes_buf<'a, const N: usize, R>(
     reader: &mut R,
     buf: &'a mut [MaybeUninit<u8>; N],
     allowed_size: RangeInclusive<usize>,
 ) -> io::Result<&'a [u8]>
 where
-    R: AsyncReadExt + Unpin,
+    R: AsyncReadExt + Unpin + ?Sized,
 {
     assert_eq!(N % 8, 0);
     assert!(*allowed_size.end() <= N);
@@ -135,6 +136,7 @@ where
 }
 
 /// SAFETY: The bytes have to actually be initialized.
+#[cfg(feature = "async")]
 unsafe fn assume_init_bytes(slice: &[MaybeUninit<u8>]) -> &[u8] {
     &*(slice as *const [MaybeUninit<u8>] as *const [u8])
 }
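
For reference, the framing read_bytes consumes is unchanged: a little-endian u64 length, the payload, then null padding to the next 8-byte boundary. A sketch, assuming the module stays reachable as nix_compat::wire::bytes:

    use nix_compat::wire::bytes::read_bytes;
    use std::io::Cursor;

    #[tokio::main]
    async fn main() -> std::io::Result<()> {
        // 5u64 little-endian length, "hello", then 3 bytes of padding.
        let framed = [&5u64.to_le_bytes()[..], b"hello\0\0\0"].concat();
        let payload = read_bytes(&mut Cursor::new(framed), 0..=1024).await?;
        assert_eq!(payload, b"hello");
        Ok(())
    }
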
diff --git a/tvix/nix-compat/src/wire/bytes/reader/mod.rs b/tvix/nix-compat/src/wire/bytes/reader/mod.rs
index 6bd376c06fb8..77950496ed6b 100644
--- a/tvix/nix-compat/src/wire/bytes/reader/mod.rs
+++ b/tvix/nix-compat/src/wire/bytes/reader/mod.rs
@@ -109,8 +109,6 @@ where
     }
 
     /// Remaining data length, ie not including data already read.
-    ///
-    /// If the size has not been read yet, this is [None].
     pub fn len(&self) -> u64 {
         match self.state {
             State::Body {
@@ -152,7 +150,7 @@ impl<R: AsyncRead + Unpin, T: Tag> AsyncRead for BytesReader<R, T> {
                     let mut bytes_read = 0;
                     ready!(with_limited(buf, remaining, |buf| {
                         let ret = reader.poll_read(cx, buf);
-                        bytes_read = buf.initialized().len();
+                        bytes_read = buf.filled().len();
                         ret
                     }))?;
 
@@ -414,6 +412,7 @@ mod tests {
     }
 
     /// Read the trailer immediately if there is no payload.
+    #[cfg(feature = "async")]
     #[tokio::test]
     async fn read_trailer_immediately() {
         use crate::nar::wire::PadPar;
@@ -431,6 +430,7 @@ mod tests {
     }
 
     /// Read the trailer even if we only read the exact payload size.
+    #[cfg(feature = "async")]
     #[tokio::test]
     async fn read_exact_trailer() {
         use crate::nar::wire::PadPar;
diff --git a/tvix/nix-compat/testdata/nix.conf b/tvix/nix-compat/testdata/nix.conf
new file mode 100644
index 000000000000..1fa089053eb6
--- /dev/null
+++ b/tvix/nix-compat/testdata/nix.conf
@@ -0,0 +1,20 @@
+# WARNING: this file is generated from the nix.* options in
+# your NixOS configuration, typically
+# /etc/nixos/configuration.nix.  Do not edit it!
+allowed-users = *
+auto-optimise-store = false
+cores = 0
+max-jobs = 8
+require-sigs = true
+sandbox = true
+sandbox-fallback = false
+substituters = https://nix-community.cachix.org https://cache.nixos.org/
+system-features = nixos-test benchmark big-parallel kvm
+trusted-public-keys = cache.nixos.org-1:6NCHdD59X431o0gWypbMrAURkbJ16ZPMQFGspcDShjY= nix-community.cachix.org-1:mB9FSh9qf2dCimDSUo8Zy7bkq5CX+/rkCWyvRCYg3Fs=
+trusted-substituters = 
+trusted-users = flokli
+extra-platforms = aarch64-linux i686-linux
+extra-sandbox-paths = /run/binfmt /nix/store/swwyxyqpazzvbwx8bv40z7ih144q841f-qemu-aarch64-binfmt-P-x86_64-unknown-linux-musl
+experimental-features = nix-command
+
+builders-use-substitutes = true
diff --git a/tvix/nix-compat/testdata/other_nix.conf b/tvix/nix-compat/testdata/other_nix.conf
new file mode 100644
index 000000000000..63c4ea2ec78e
--- /dev/null
+++ b/tvix/nix-compat/testdata/other_nix.conf
@@ -0,0 +1,18 @@
+# This file contains some more comments in various places.
+allowed-users = *
+auto-optimise-store = false
+cores = 0
+max-jobs = 8
+require-sigs = true
+sandbox=true
+sandbox-fallback =	false
+substituters = https://nix-community.cachix.org https://cache.nixos.org/ #comment # stillcomment
+system-features = nixos-test benchmark big-parallel kvm
+trusted-public-keys = cache.nixos.org-1:6NCHdD59X431o0gWypbMrAURkbJ16ZPMQFGspcDShjY= nix-community.cachix.org-1:mB9FSh9qf2dCimDSUo8Zy7bkq5CX+/rkCWyvRCYg3Fs=
+trusted-substituters = 
+trusted-users = flokli
+extra-platforms = aarch64-linux i686-linux
+extra-sandbox-paths = /run/binfmt /nix/store/swwyxyqpazzvbwx8bv40z7ih144q841f-qemu-aarch64-binfmt-P-x86_64-unknown-linux-musl
+experimental-features = nix-command
+
+builders-use-substitutes = true
diff --git a/tvix/serde/Cargo.toml b/tvix/serde/Cargo.toml
index 5652126ada1d..fc5f08a2ddcd 100644
--- a/tvix/serde/Cargo.toml
+++ b/tvix/serde/Cargo.toml
@@ -5,5 +5,5 @@ edition = "2021"
 
 [dependencies]
 tvix-eval = { path = "../eval" }
-serde = { version = "1.0", features = ["derive"] }
-bstr = { version = "1.8.0", features = ["serde"] }
+serde = { workspace = true, features = ["derive"] }
+bstr = { workspace = true, features = ["serde"] }
diff --git a/tvix/serde/examples/nixpkgs.rs b/tvix/serde/examples/nixpkgs.rs
index ad8c4160b5d0..e3c589dba118 100644
--- a/tvix/serde/examples/nixpkgs.rs
+++ b/tvix/serde/examples/nixpkgs.rs
@@ -23,8 +23,8 @@ fn main() {
     }
     "#;
 
-    let result = tvix_serde::from_str_with_config::<Config, _>(code, |eval| {
-        eval.enable_impure(None);
+    let result = tvix_serde::from_str_with_config::<Config, _>(code, |eval_builder| {
+        eval_builder.enable_impure(None)
     });
 
     match result {
diff --git a/tvix/serde/src/de.rs b/tvix/serde/src/de.rs
index 428b9e2e8114..88cb46d618ce 100644
--- a/tvix/serde/src/de.rs
+++ b/tvix/serde/src/de.rs
@@ -3,8 +3,7 @@
 use bstr::ByteSlice;
 use serde::de::value::{MapDeserializer, SeqDeserializer};
 use serde::de::{self, EnumAccess, VariantAccess};
-pub use tvix_eval::Evaluation;
-use tvix_eval::{EvalIO, Value};
+use tvix_eval::{EvalIO, EvaluationBuilder, Value};
 
 use crate::error::Error;
 
@@ -36,7 +35,7 @@ pub fn from_str<'code, T>(src: &'code str) -> Result<T, Error>
 where
     T: serde::Deserialize<'code>,
 {
-    from_str_with_config(src, |_| /* no extra config */ ())
+    from_str_with_config(src, |b| /* no extra config */ b)
 }
 
 /// Evaluate the Nix code in `src`, with extra configuration for the
@@ -44,13 +43,12 @@ where
 pub fn from_str_with_config<'code, T, F>(src: &'code str, config: F) -> Result<T, Error>
 where
     T: serde::Deserialize<'code>,
-    F: FnOnce(&mut Evaluation<Box<dyn EvalIO>>),
+    F: for<'co, 'ro, 'env> FnOnce(
+        EvaluationBuilder<'co, 'ro, 'env, Box<dyn EvalIO>>,
+    ) -> EvaluationBuilder<'co, 'ro, 'env, Box<dyn EvalIO>>,
 {
     // First step is to evaluate the Nix code ...
-    let mut eval = Evaluation::new_pure();
-    config(&mut eval);
-
-    eval.strict = true;
+    let eval = config(EvaluationBuilder::new_pure().strict()).build();
     let result = eval.evaluate(src, None);
 
     if !result.errors.is_empty() {
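
Callers now thread the builder through the closure and return it, rather than mutating an Evaluation in place; mirroring the updated tests (the greeting builtin is illustrative):

    let greeting: String = tvix_serde::from_str_with_config("builtins.greeting", |builder| {
        builder.add_src_builtin("greeting", "\"hello\"")
    })
    .expect("should deserialize");
    assert_eq!(greeting, "hello");
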
diff --git a/tvix/serde/src/de_tests.rs b/tvix/serde/src/de_tests.rs
index 1c3acd1c2f52..d62464da9a9f 100644
--- a/tvix/serde/src/de_tests.rs
+++ b/tvix/serde/src/de_tests.rs
@@ -204,7 +204,7 @@ fn deserialize_enum_all() {
 fn deserialize_with_config() {
     let result: String = from_str_with_config("builtins.testWithConfig", |eval| {
         // Add a literal string builtin that just returns `"ok"`.
-        eval.src_builtins.push(("testWithConfig", "\"ok\""));
+        eval.add_src_builtin("testWithConfig", "\"ok\"")
     })
     .expect("should deserialize");
 
@@ -236,10 +236,9 @@ mod test_builtins {
 fn deserialize_with_extra_builtin() {
     let code = "builtins.prependHello \"world\"";
 
-    let result: String = from_str_with_config(code, |eval| {
-        eval.builtins.append(&mut test_builtins::builtins());
-    })
-    .expect("should deserialize");
+    let result: String =
+        from_str_with_config(code, |eval| eval.add_builtins(test_builtins::builtins()))
+            .expect("should deserialize");
 
     assert_eq!(result, "hello world");
 }
diff --git a/tvix/shell.nix b/tvix/shell.nix
index f0d8ab16574d..d6b4bb7d3458 100644
--- a/tvix/shell.nix
+++ b/tvix/shell.nix
@@ -10,12 +10,18 @@
     depot.third_party.sources = import ./sources { };
     additionalOverlays = [
       (self: super: {
-        # https://github.com/googleapis/google-cloud-go/pull/9665
-        cbtemulator = super.cbtemulator.overrideAttrs (old: {
-          patches = old.patches or [ ] ++ [
-            ./nixpkgs/cbtemulator-uds.patch
-          ];
-        });
+        # macFUSE bump containing fix for https://github.com/osxfuse/osxfuse/issues/974
+        # https://github.com/NixOS/nixpkgs/pull/320197
+        fuse =
+          if super.stdenv.isDarwin then
+            super.fuse.overrideAttrs
+              (old: rec {
+                version = "4.8.0";
+                src = super.fetchurl {
+                  url = "https://github.com/osxfuse/osxfuse/releases/download/macfuse-${version}/macfuse-${version}.dmg";
+                  hash = "sha256-ucTzO2qdN4QkowMVvC3+4pjEVjbwMsB0xFk+bvQxwtQ=";
+                };
+              }) else super.fuse;
       })
     ];
   })
@@ -30,12 +36,15 @@ pkgs.mkShell {
     pkgs.cargo-machete
     pkgs.cargo-expand
     pkgs.clippy
+    pkgs.d2
     pkgs.evans
     pkgs.fuse
     pkgs.go
     pkgs.grpcurl
     pkgs.hyperfine
     pkgs.mdbook
+    pkgs.mdbook-admonish
+    pkgs.mdbook-d2
     pkgs.mdbook-plantuml
     pkgs.nix_2_3 # b/313
     pkgs.pkg-config
diff --git a/tvix/store-go/pathinfo.pb.go b/tvix/store-go/pathinfo.pb.go
index a4915a3c1f25..2609600c254f 100644
--- a/tvix/store-go/pathinfo.pb.go
+++ b/tvix/store-go/pathinfo.pb.go
@@ -3,7 +3,7 @@
 
 // Code generated by protoc-gen-go. DO NOT EDIT.
 // versions:
-// 	protoc-gen-go v1.33.0
+// 	protoc-gen-go v1.34.2
 // 	protoc        (unknown)
 // source: tvix/store/protos/pathinfo.proto
 
@@ -545,7 +545,7 @@ func file_tvix_store_protos_pathinfo_proto_rawDescGZIP() []byte {
 
 var file_tvix_store_protos_pathinfo_proto_enumTypes = make([]protoimpl.EnumInfo, 1)
 var file_tvix_store_protos_pathinfo_proto_msgTypes = make([]protoimpl.MessageInfo, 5)
-var file_tvix_store_protos_pathinfo_proto_goTypes = []interface{}{
+var file_tvix_store_protos_pathinfo_proto_goTypes = []any{
 	(NARInfo_CA_Hash)(0),      // 0: tvix.store.v1.NARInfo.CA.Hash
 	(*PathInfo)(nil),          // 1: tvix.store.v1.PathInfo
 	(*StorePath)(nil),         // 2: tvix.store.v1.StorePath
@@ -574,7 +574,7 @@ func file_tvix_store_protos_pathinfo_proto_init() {
 		return
 	}
 	if !protoimpl.UnsafeEnabled {
-		file_tvix_store_protos_pathinfo_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} {
+		file_tvix_store_protos_pathinfo_proto_msgTypes[0].Exporter = func(v any, i int) any {
 			switch v := v.(*PathInfo); i {
 			case 0:
 				return &v.state
@@ -586,7 +586,7 @@ func file_tvix_store_protos_pathinfo_proto_init() {
 				return nil
 			}
 		}
-		file_tvix_store_protos_pathinfo_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} {
+		file_tvix_store_protos_pathinfo_proto_msgTypes[1].Exporter = func(v any, i int) any {
 			switch v := v.(*StorePath); i {
 			case 0:
 				return &v.state
@@ -598,7 +598,7 @@ func file_tvix_store_protos_pathinfo_proto_init() {
 				return nil
 			}
 		}
-		file_tvix_store_protos_pathinfo_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} {
+		file_tvix_store_protos_pathinfo_proto_msgTypes[2].Exporter = func(v any, i int) any {
 			switch v := v.(*NARInfo); i {
 			case 0:
 				return &v.state
@@ -610,7 +610,7 @@ func file_tvix_store_protos_pathinfo_proto_init() {
 				return nil
 			}
 		}
-		file_tvix_store_protos_pathinfo_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} {
+		file_tvix_store_protos_pathinfo_proto_msgTypes[3].Exporter = func(v any, i int) any {
 			switch v := v.(*NARInfo_Signature); i {
 			case 0:
 				return &v.state
@@ -622,7 +622,7 @@ func file_tvix_store_protos_pathinfo_proto_init() {
 				return nil
 			}
 		}
-		file_tvix_store_protos_pathinfo_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} {
+		file_tvix_store_protos_pathinfo_proto_msgTypes[4].Exporter = func(v any, i int) any {
 			switch v := v.(*NARInfo_CA); i {
 			case 0:
 				return &v.state
diff --git a/tvix/store-go/rpc_pathinfo.pb.go b/tvix/store-go/rpc_pathinfo.pb.go
index 883ffb3f01ba..3acdd32e8529 100644
--- a/tvix/store-go/rpc_pathinfo.pb.go
+++ b/tvix/store-go/rpc_pathinfo.pb.go
@@ -3,7 +3,7 @@
 
 // Code generated by protoc-gen-go. DO NOT EDIT.
 // versions:
-// 	protoc-gen-go v1.33.0
+// 	protoc-gen-go v1.34.2
 // 	protoc        (unknown)
 // source: tvix/store/protos/rpc_pathinfo.proto
 
@@ -256,7 +256,7 @@ func file_tvix_store_protos_rpc_pathinfo_proto_rawDescGZIP() []byte {
 }
 
 var file_tvix_store_protos_rpc_pathinfo_proto_msgTypes = make([]protoimpl.MessageInfo, 3)
-var file_tvix_store_protos_rpc_pathinfo_proto_goTypes = []interface{}{
+var file_tvix_store_protos_rpc_pathinfo_proto_goTypes = []any{
 	(*GetPathInfoRequest)(nil),   // 0: tvix.store.v1.GetPathInfoRequest
 	(*ListPathInfoRequest)(nil),  // 1: tvix.store.v1.ListPathInfoRequest
 	(*CalculateNARResponse)(nil), // 2: tvix.store.v1.CalculateNARResponse
@@ -286,7 +286,7 @@ func file_tvix_store_protos_rpc_pathinfo_proto_init() {
 	}
 	file_tvix_store_protos_pathinfo_proto_init()
 	if !protoimpl.UnsafeEnabled {
-		file_tvix_store_protos_rpc_pathinfo_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} {
+		file_tvix_store_protos_rpc_pathinfo_proto_msgTypes[0].Exporter = func(v any, i int) any {
 			switch v := v.(*GetPathInfoRequest); i {
 			case 0:
 				return &v.state
@@ -298,7 +298,7 @@ func file_tvix_store_protos_rpc_pathinfo_proto_init() {
 				return nil
 			}
 		}
-		file_tvix_store_protos_rpc_pathinfo_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} {
+		file_tvix_store_protos_rpc_pathinfo_proto_msgTypes[1].Exporter = func(v any, i int) any {
 			switch v := v.(*ListPathInfoRequest); i {
 			case 0:
 				return &v.state
@@ -310,7 +310,7 @@ func file_tvix_store_protos_rpc_pathinfo_proto_init() {
 				return nil
 			}
 		}
-		file_tvix_store_protos_rpc_pathinfo_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} {
+		file_tvix_store_protos_rpc_pathinfo_proto_msgTypes[2].Exporter = func(v any, i int) any {
 			switch v := v.(*CalculateNARResponse); i {
 			case 0:
 				return &v.state
@@ -323,7 +323,7 @@ func file_tvix_store_protos_rpc_pathinfo_proto_init() {
 			}
 		}
 	}
-	file_tvix_store_protos_rpc_pathinfo_proto_msgTypes[0].OneofWrappers = []interface{}{
+	file_tvix_store_protos_rpc_pathinfo_proto_msgTypes[0].OneofWrappers = []any{
 		(*GetPathInfoRequest_ByOutputHash)(nil),
 	}
 	type x struct{}
diff --git a/tvix/store/Cargo.toml b/tvix/store/Cargo.toml
index 4727f43f78ff..9d54ad1ea760 100644
--- a/tvix/store/Cargo.toml
+++ b/tvix/store/Cargo.toml
@@ -4,66 +4,62 @@ version = "0.1.0"
 edition = "2021"
 
 [dependencies]
-anyhow = "1.0.68"
-async-compression = { version = "0.4.9", features = ["tokio", "bzip2", "gzip", "xz", "zstd"]}
-async-stream = "0.3.5"
-blake3 = { version = "1.3.1", features = ["rayon", "std"] }
-bstr = "1.6.0"
-bytes = "1.4.0"
-clap = { version = "4.0", features = ["derive", "env"] }
-count-write = "0.1.0"
-data-encoding = "2.3.3"
-futures = "0.3.30"
-lazy_static = "1.4.0"
+anyhow = { workspace = true }
+async-compression = { workspace = true, features = ["tokio", "bzip2", "gzip", "xz", "zstd"] }
+async-stream = { workspace = true }
+blake3 = { workspace = true, features = ["rayon", "std"] }
+bstr = { workspace = true }
+bytes = { workspace = true }
+clap = { workspace = true, features = ["derive", "env"] }
+count-write = { workspace = true }
+data-encoding = { workspace = true }
+futures = { workspace = true }
+lazy_static = { workspace = true }
 nix-compat = { path = "../nix-compat", features = ["async"] }
-pin-project-lite = "0.2.13"
-prost = "0.12.1"
-opentelemetry = { version = "0.22.0", optional = true}
-opentelemetry-otlp = { version = "0.15.0", optional = true }
-opentelemetry_sdk = { version = "0.22.1", features = ["rt-tokio"], optional = true}
-serde = { version = "1.0.197", features = [ "derive" ] }
-serde_json = "1.0"
-serde_with = "3.7.0"
-serde_qs = "0.12.0"
-sha2 = "0.10.6"
-sled = { version = "0.34.7" }
-thiserror = "1.0.38"
-tokio = { version = "1.32.0", features = ["fs", "macros", "net", "rt", "rt-multi-thread", "signal"] }
-tokio-listener = { version = "0.4.1", features = [ "tonic011" ] }
-tokio-stream = { version = "0.1.14", features = ["fs"] }
-tokio-util = { version = "0.7.9", features = ["io", "io-util", "compat"] }
-tonic = { version = "0.11.0", features = ["tls", "tls-roots"] }
-tower = "0.4.13"
-tracing = "0.1.37"
-tracing-opentelemetry = "0.23.0"
-tracing-subscriber = { version = "0.3.18", features = ["env-filter"] }
+pin-project-lite = { workspace = true }
+prost = { workspace = true }
+serde = { workspace = true, features = ["derive"] }
+serde_json = { workspace = true }
+serde_with = { workspace = true }
+serde_qs = { workspace = true }
+sha2 = { workspace = true }
+sled = { workspace = true }
+thiserror = { workspace = true }
+tokio = { workspace = true, features = ["fs", "macros", "net", "rt", "rt-multi-thread", "signal"] }
+tokio-listener = { workspace = true, features = ["clap", "multi-listener", "sd_listen", "tonic012"] }
+tokio-stream = { workspace = true, features = ["fs"] }
+tokio-util = { workspace = true, features = ["io", "io-util", "compat"] }
+tonic = { workspace = true, features = ["tls", "tls-roots"] }
+tower = { workspace = true }
+tower-http = { workspace = true, features = ["trace"] }
 tvix-castore = { path = "../castore" }
-url = "2.4.0"
-walkdir = "2.4.0"
-reqwest = { version = "0.11.22", features = ["rustls-tls-native-roots", "stream"], default-features = false }
-lru = "0.12.3"
-parking_lot = "0.12.2"
-
-[dependencies.tonic-reflection]
-optional = true
-version = "0.11.0"
-
-[dependencies.bigtable_rs]
-optional = true
-# https://github.com/liufuyang/bigtable_rs/pull/72
-git = "https://github.com/flokli/bigtable_rs"
-rev = "0af404741dfc40eb9fa99cf4d4140a09c5c20df7"
+url = { workspace = true }
+walkdir = { workspace = true }
+reqwest = { workspace = true, features = ["rustls-tls-native-roots", "stream"] }
+reqwest-middleware = { workspace = true }
+lru = { workspace = true }
+parking_lot = { workspace = true }
+tvix-tracing = { path = "../tracing", features = ["tonic", "reqwest"] }
+tracing = { workspace = true }
+tracing-indicatif = { workspace = true }
+hyper-util = { workspace = true }
+toml = { version = "0.8.19", optional = true }
+tonic-health = { workspace = true }
+redb = { workspace = true }
+mimalloc = { workspace = true }
+tonic-reflection = { workspace = true, optional = true }
+bigtable_rs = { workspace = true, optional = true }
 
 [build-dependencies]
-prost-build = "0.12.1"
-tonic-build = "0.11.0"
+prost-build = { workspace = true }
+tonic-build = { workspace = true }
 
 [dev-dependencies]
-async-process = "2.1.0"
-rstest = "0.19.0"
-rstest_reuse = "0.6.0"
-tempfile = "3.3.0"
-tokio-retry = "0.3.0"
+async-process = { workspace = true }
+rstest = { workspace = true }
+rstest_reuse = { workspace = true }
+tempfile = { workspace = true }
+tokio-retry = { workspace = true }
 
 [features]
 default = ["cloud", "fuse", "otlp", "tonic-reflection"]
@@ -72,9 +68,11 @@ cloud = [
   "tvix-castore/cloud"
 ]
 fuse = ["tvix-castore/fuse"]
-otlp = ["dep:opentelemetry", "dep:opentelemetry-otlp", "dep:opentelemetry_sdk"]
+otlp = ["tvix-tracing/otlp"]
 tonic-reflection = ["dep:tonic-reflection", "tvix-castore/tonic-reflection"]
+tracy = ["tvix-tracing/tracy"]
 virtiofs = ["tvix-castore/virtiofs"]
+xp-store-composition = ["toml"]
 # Whether to run the integration tests.
 # Requires the following packages in $PATH:
 # cbtemulator, google-cloud-bigtable-tool
diff --git a/tvix/store/build.rs b/tvix/store/build.rs
index 809fa29578b5..4e0371311220 100644
--- a/tvix/store/build.rs
+++ b/tvix/store/build.rs
@@ -12,24 +12,20 @@ fn main() -> Result<()> {
         builder = builder.file_descriptor_set_path(descriptor_path);
     };
 
-    // https://github.com/hyperium/tonic/issues/908
-    let mut config = prost_build::Config::new();
-    config.bytes(["."]);
-    config.extern_path(".tvix.castore.v1", "::tvix_castore::proto");
-
     builder
         .build_server(true)
         .build_client(true)
         .emit_rerun_if_changed(false)
-        .compile_with_config(
-            config,
+        .bytes(["."])
+        .extern_path(".tvix.castore.v1", "::tvix_castore::proto")
+        .compile(
             &[
                 "tvix/store/protos/pathinfo.proto",
                 "tvix/store/protos/rpc_pathinfo.proto",
             ],
            // If we are running `cargo build` manually, using `../..` works fine,
            // but when running inside a nix build, we need to instead point PROTO_ROOT
-            // to a sparseTree containing that structure.
+            // to a custom tree containing that structure.
             &[match std::env::var_os("PROTO_ROOT") {
                 Some(proto_root) => proto_root.to_str().unwrap().to_owned(),
                 None => "../..".to_string(),
diff --git a/tvix/store/default.nix b/tvix/store/default.nix
index ad47994f2446..863ddb6de23f 100644
--- a/tvix/store/default.nix
+++ b/tvix/store/default.nix
@@ -1,4 +1,4 @@
-{ depot, pkgs, ... }:
+{ depot, pkgs, lib, ... }:
 
 let
   mkImportCheck = p: expectedPath: {
@@ -22,31 +22,35 @@ let
   };
 in
 
-(depot.tvix.crates.workspaceMembers.tvix-store.build.override {
+(depot.tvix.crates.workspaceMembers.tvix-store.build.override (old: {
   runTests = true;
   testPreRun = ''
-    export SSL_CERT_FILE=${pkgs.cacert}/etc/ssl/certs/ca-bundle.crt
+    export SSL_CERT_FILE=/dev/null
   '';
-
-  # enable some optional features.
-  features = [ "default" "cloud" ]
-    # virtiofs feature currently fails to build on Darwin.
-    ++ pkgs.lib.optional pkgs.stdenv.isLinux "virtiofs";
-}).overrideAttrs (_: {
-  meta.ci.targets = [ "integration-tests" ];
-  meta.ci.extraSteps = {
-    import-docs = (mkImportCheck "tvix/store/docs" ./docs);
+  features = old.features
+    # virtiofs feature currently fails to build on Darwin
+    ++ lib.optional pkgs.stdenv.isLinux "virtiofs";
+})).overrideAttrs (old: rec {
+  meta.ci = {
+    targets = [ "integration-tests" ] ++ lib.filter (x: lib.hasPrefix "with-features" x || x == "no-features") (lib.attrNames passthru);
+    extraSteps.import-docs = (mkImportCheck "tvix/docs/src/store" ../docs/src/store);
   };
-  passthru.integration-tests = depot.tvix.crates.workspaceMembers.tvix-store.build.override {
-    runTests = true;
-    testPreRun = ''
-      export SSL_CERT_FILE=${pkgs.cacert}/etc/ssl/certs/ca-bundle.crt
-      export PATH="$PATH:${pkgs.lib.makeBinPath [pkgs.cbtemulator pkgs.google-cloud-bigtable-tool]}"
+  passthru = old.passthru // (depot.tvix.utils.mkFeaturePowerset {
+    inherit (old) crateName;
+    features = ([ "cloud" "fuse" "otlp" "tonic-reflection" "xp-store-composition" ]
+      # virtiofs feature currently fails to build on Darwin
+      ++ lib.optional pkgs.stdenv.isLinux "virtiofs");
+    override.testPreRun = ''
+      export SSL_CERT_FILE=/dev/null
     '';
-
-    # enable some optional features.
-    features = [ "default" "cloud" "integration" ]
-      # virtiofs feature currently fails to build on Darwin.
-      ++ pkgs.lib.optional pkgs.stdenv.isLinux "virtiofs";
+  }) // {
+    integration-tests = depot.tvix.crates.workspaceMembers.${old.crateName}.build.override (old: {
+      runTests = true;
+      testPreRun = ''
+        export SSL_CERT_FILE=/dev/null
+        export PATH="$PATH:${pkgs.lib.makeBinPath [ pkgs.cbtemulator pkgs.google-cloud-bigtable-tool ]}"
+      '';
+      features = old.features ++ [ "integration" ];
+    });
   };
 })
diff --git a/tvix/store/protos/default.nix b/tvix/store/protos/default.nix
index 56345d9338b2..005a2697e5e9 100644
--- a/tvix/store/protos/default.nix
+++ b/tvix/store/protos/default.nix
@@ -1,17 +1,12 @@
-{ depot, pkgs, ... }:
+{ depot, pkgs, lib, ... }:
 let
-  protos = depot.nix.sparseTree {
-    name = "store-protos";
-    root = depot.path.origSrc;
-    paths = [
-      # We need to include castore.proto (only), as it's referred.
-      ../../castore/protos/castore.proto
-      ./pathinfo.proto
-      ./rpc_pathinfo.proto
-      ../../../buf.yaml
-      ../../../buf.gen.yaml
-    ];
-  };
+  protos = lib.sourceByRegex depot.path.origSrc [
+    "buf.yaml"
+    "buf.gen.yaml"
+    # We need to include castore.proto (only), as it's referenced.
+    "^tvix(/castore(/protos(/castore\.proto)?)?)?$"
+    "^tvix(/store(/protos(/.*\.proto)?)?)?$"
+  ];
 in
 depot.nix.readTree.drvTargets {
   inherit protos;
diff --git a/tvix/store/src/bin/tvix-store.rs b/tvix/store/src/bin/tvix-store.rs
index 906d0ab5206a..6da239a8fee3 100644
--- a/tvix/store/src/bin/tvix-store.rs
+++ b/tvix/store/src/bin/tvix-store.rs
@@ -9,19 +9,16 @@ use serde::Deserialize;
 use serde::Serialize;
 use std::path::PathBuf;
 use std::sync::Arc;
-use tokio_listener::Listener;
-use tokio_listener::SystemOptions;
-use tokio_listener::UserOptions;
 use tonic::transport::Server;
-use tracing::info;
-use tracing::Level;
-use tracing_subscriber::EnvFilter;
-use tracing_subscriber::Layer;
-use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt};
+use tower::ServiceBuilder;
+use tower_http::trace::{DefaultMakeSpan, TraceLayer};
+use tracing::{info, info_span, instrument, Level, Span};
+use tracing_indicatif::span_ext::IndicatifSpanExt as _;
 use tvix_castore::import::fs::ingest_path;
 use tvix_store::nar::NarCalculationService;
 use tvix_store::proto::NarInfo;
 use tvix_store::proto::PathInfo;
+use tvix_store::utils::{ServiceUrls, ServiceUrlsGrpc};
 
 use tvix_castore::proto::blob_service_server::BlobServiceServer;
 use tvix_castore::proto::directory_service_server::DirectoryServiceServer;
@@ -37,15 +34,6 @@ use tvix_store::pathinfoservice::make_fs;
 #[cfg(feature = "fuse")]
 use tvix_castore::fs::fuse::FuseDaemon;
 
-#[cfg(feature = "otlp")]
-use opentelemetry::KeyValue;
-#[cfg(feature = "otlp")]
-use opentelemetry_sdk::{
-    resource::{ResourceDetector, SdkProvidedResourceDetector},
-    trace::BatchConfig,
-    Resource,
-};
-
 #[cfg(feature = "virtiofs")]
 use tvix_castore::fs::virtiofs::start_virtiofs_daemon;
 
@@ -54,6 +42,11 @@ use tvix_castore::proto::FILE_DESCRIPTOR_SET as CASTORE_FILE_DESCRIPTOR_SET;
 #[cfg(feature = "tonic-reflection")]
 use tvix_store::proto::FILE_DESCRIPTOR_SET;
 
+use mimalloc::MiMalloc;
+
+#[global_allocator]
+static GLOBAL: MiMalloc = MiMalloc;
+
 #[derive(Parser)]
 #[command(author, version, about, long_about = None)]
 struct Cli {
@@ -65,8 +58,8 @@ struct Cli {
     /// It's also possible to set `RUST_LOG` according to
     /// `tracing_subscriber::filter::EnvFilter`, which will always have
     /// priority.
-    #[arg(long)]
-    log_level: Option<Level>,
+    #[arg(long, default_value_t=Level::INFO)]
+    log_level: Level,
 
     #[command(subcommand)]
     command: Commands,
@@ -76,51 +69,26 @@ struct Cli {
 enum Commands {
     /// Runs the tvix-store daemon.
     Daemon {
-        #[arg(long, short = 'l')]
-        listen_address: Option<String>,
-
-        #[arg(
-            long,
-            env,
-            default_value = "objectstore+file:///var/lib/tvix-store/blobs.object_store"
-        )]
-        blob_service_addr: String,
-
-        #[arg(
-            long,
-            env,
-            default_value = "sled:///var/lib/tvix-store/directories.sled"
-        )]
-        directory_service_addr: String,
-
-        #[arg(long, env, default_value = "sled:///var/lib/tvix-store/pathinfo.sled")]
-        path_info_service_addr: String,
+        /// The address to listen on.
+        #[clap(flatten)]
+        listen_args: tokio_listener::ListenerAddressLFlag,
+
+        #[clap(flatten)]
+        service_addrs: ServiceUrls,
     },
     /// Imports a list of paths into the store, printing the store path for each of them.
     Import {
         #[clap(value_name = "PATH")]
         paths: Vec<PathBuf>,
 
-        #[arg(long, env, default_value = "grpc+http://[::1]:8000")]
-        blob_service_addr: String,
-
-        #[arg(long, env, default_value = "grpc+http://[::1]:8000")]
-        directory_service_addr: String,
-
-        #[arg(long, env, default_value = "grpc+http://[::1]:8000")]
-        path_info_service_addr: String,
+        #[clap(flatten)]
+        service_addrs: ServiceUrlsGrpc,
     },
 
     /// Copies a list of store paths on the system into tvix-store.
     Copy {
-        #[arg(long, env, default_value = "grpc+http://[::1]:8000")]
-        blob_service_addr: String,
-
-        #[arg(long, env, default_value = "grpc+http://[::1]:8000")]
-        directory_service_addr: String,
-
-        #[arg(long, env, default_value = "grpc+http://[::1]:8000")]
-        path_info_service_addr: String,
+        #[clap(flatten)]
+        service_addrs: ServiceUrlsGrpc,
 
         /// A path pointing to a JSON file produced by the Nix
         /// `__structuredAttrs` containing reference graph information provided
@@ -140,14 +108,8 @@ enum Commands {
         #[clap(value_name = "PATH")]
         dest: PathBuf,
 
-        #[arg(long, env, default_value = "grpc+http://[::1]:8000")]
-        blob_service_addr: String,
-
-        #[arg(long, env, default_value = "grpc+http://[::1]:8000")]
-        directory_service_addr: String,
-
-        #[arg(long, env, default_value = "grpc+http://[::1]:8000")]
-        path_info_service_addr: String,
+        #[clap(flatten)]
+        service_addrs: ServiceUrlsGrpc,
 
         /// Number of FUSE threads to spawn.
         #[arg(long, env, default_value_t = default_threads())]
@@ -176,14 +138,8 @@ enum Commands {
         #[clap(value_name = "PATH")]
         socket: PathBuf,
 
-        #[arg(long, env, default_value = "grpc+http://[::1]:8000")]
-        blob_service_addr: String,
-
-        #[arg(long, env, default_value = "grpc+http://[::1]:8000")]
-        directory_service_addr: String,
-
-        #[arg(long, env, default_value = "grpc+http://[::1]:8000")]
-        path_info_service_addr: String,
+        #[clap(flatten)]
+        service_addrs: ServiceUrlsGrpc,
 
         /// Whether to list elements at the root of the mount point.
         /// This is useful if your PathInfoService doesn't provide an
@@ -197,113 +153,41 @@ enum Commands {
     },
 }
 
-#[cfg(all(feature = "fuse", not(target_os = "macos")))]
+#[cfg(feature = "fuse")]
 fn default_threads() -> usize {
     std::thread::available_parallelism()
         .map(|threads| threads.into())
         .unwrap_or(4)
 }
-// On MacFUSE only a single channel will receive ENODEV when the file system is
-// unmounted and so all the other channels will block forever.
-// See https://github.com/osxfuse/osxfuse/issues/974
-#[cfg(all(feature = "fuse", target_os = "macos"))]
-fn default_threads() -> usize {
-    1
-}
-
-#[tokio::main]
-async fn main() -> Result<(), Box<dyn std::error::Error>> {
-    let cli = Cli::parse();
-
-    // configure log settings
-    let level = cli.log_level.unwrap_or(Level::INFO);
-
-    // Set up the tracing subscriber.
-    let subscriber = tracing_subscriber::registry().with(
-        tracing_subscriber::fmt::Layer::new()
-            .with_writer(std::io::stderr)
-            .compact()
-            .with_filter(
-                EnvFilter::builder()
-                    .with_default_directive(level.into())
-                    .from_env()
-                    .expect("invalid RUST_LOG"),
-            ),
-    );
-
-    // Add the otlp layer (when otlp is enabled, and it's not disabled in the CLI)
-    // then init the registry.
-    // If the feature is feature-flagged out, just init without adding the layer.
-    // It's necessary to do this separately, as every with() call chains the
-    // layer into the type of the registry.
-    #[cfg(feature = "otlp")]
-    {
-        let subscriber = if cli.otlp {
-            let tracer = opentelemetry_otlp::new_pipeline()
-                .tracing()
-                .with_exporter(opentelemetry_otlp::new_exporter().tonic())
-                .with_batch_config(BatchConfig::default())
-                .with_trace_config(opentelemetry_sdk::trace::config().with_resource({
-                    // use SdkProvidedResourceDetector.detect to detect resources,
-                    // but replace the default service name with our default.
-                    // https://github.com/open-telemetry/opentelemetry-rust/issues/1298
-                    let resources =
-                        SdkProvidedResourceDetector.detect(std::time::Duration::from_secs(0));
-                    // SdkProvidedResourceDetector currently always sets
-                    // `service.name`, but we don't like its default.
-                    if resources.get("service.name".into()).unwrap() == "unknown_service".into() {
-                        resources.merge(&Resource::new([KeyValue::new(
-                            "service.name",
-                            "tvix.store",
-                        )]))
-                    } else {
-                        resources
-                    }
-                }))
-                .install_batch(opentelemetry_sdk::runtime::Tokio)?;
-
-            // Create a tracing layer with the configured tracer
-            let layer = tracing_opentelemetry::layer().with_tracer(tracer);
-
-            subscriber.with(Some(layer))
-        } else {
-            subscriber.with(None)
-        };
-
-        subscriber.try_init()?;
-    }
-
-    // Init the registry (when otlp is not enabled)
-    #[cfg(not(feature = "otlp"))]
-    {
-        subscriber.try_init()?;
-    }
 
+#[instrument(skip_all)]
+async fn run_cli(cli: Cli) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
     match cli.command {
         Commands::Daemon {
-            listen_address,
-            blob_service_addr,
-            directory_service_addr,
-            path_info_service_addr,
+            listen_args,
+            service_addrs,
         } => {
             // initialize stores
             let (blob_service, directory_service, path_info_service, nar_calculation_service) =
-                tvix_store::utils::construct_services(
-                    blob_service_addr,
-                    directory_service_addr,
-                    path_info_service_addr,
-                )
-                .await?;
-
-            let listen_address = listen_address
-                .unwrap_or_else(|| "[::]:8000".to_string())
-                .parse()
-                .unwrap();
+                tvix_store::utils::construct_services(service_addrs).await?;
+
+            let mut server = Server::builder().layer(
+                ServiceBuilder::new()
+                    .layer(
+                        TraceLayer::new_for_grpc().make_span_with(
+                            DefaultMakeSpan::new()
+                                .level(Level::INFO)
+                                .include_headers(true),
+                        ),
+                    )
+                    .map_request(tvix_tracing::propagate::tonic::accept_trace),
+            );
 
-            let mut server = Server::builder();
+            let (_health_reporter, health_service) = tonic_health::server::health_reporter();
 
             #[allow(unused_mut)]
             let mut router = server
+                .add_service(health_service)
                 .add_service(BlobServiceServer::new(GRPCBlobServiceWrapper::new(
                     blob_service,
                 )))
@@ -311,47 +195,52 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
                     GRPCDirectoryServiceWrapper::new(directory_service),
                 ))
                 .add_service(PathInfoServiceServer::new(GRPCPathInfoServiceWrapper::new(
-                    Arc::from(path_info_service),
+                    path_info_service,
                     nar_calculation_service,
                 )));
 
             #[cfg(feature = "tonic-reflection")]
             {
-                let reflection_svc = tonic_reflection::server::Builder::configure()
-                    .register_encoded_file_descriptor_set(CASTORE_FILE_DESCRIPTOR_SET)
-                    .register_encoded_file_descriptor_set(FILE_DESCRIPTOR_SET)
-                    .build()?;
-                router = router.add_service(reflection_svc);
+                router = router.add_service(
+                    tonic_reflection::server::Builder::configure()
+                        .register_encoded_file_descriptor_set(CASTORE_FILE_DESCRIPTOR_SET)
+                        .register_encoded_file_descriptor_set(FILE_DESCRIPTOR_SET)
+                        .build_v1alpha()?,
+                );
+                router = router.add_service(
+                    tonic_reflection::server::Builder::configure()
+                        .register_encoded_file_descriptor_set(CASTORE_FILE_DESCRIPTOR_SET)
+                        .register_encoded_file_descriptor_set(FILE_DESCRIPTOR_SET)
+                        .build_v1()?,
+                );
             }
 
-            info!(listen_address=%listen_address, "starting daemon");
+            let listen_address = &listen_args.listen_address.unwrap_or_else(|| {
+                "[::]:8000"
+                    .parse()
+                    .expect("invalid fallback listen address")
+            });
 
-            let listener = Listener::bind(
-                &listen_address,
-                &SystemOptions::default(),
-                &UserOptions::default(),
+            let listener = tokio_listener::Listener::bind(
+                listen_address,
+                &Default::default(),
+                &listen_args.listener_options,
             )
             .await?;
 
+            info!(listen_address=%listen_address, "starting daemon");
+
             router.serve_with_incoming(listener).await?;
         }
         Commands::Import {
             paths,
-            blob_service_addr,
-            directory_service_addr,
-            path_info_service_addr,
+            service_addrs,
         } => {
             // FUTUREWORK: allow flat for single files?
             let (blob_service, directory_service, path_info_service, nar_calculation_service) =
-                tvix_store::utils::construct_services(
-                    blob_service_addr,
-                    directory_service_addr,
-                    path_info_service_addr,
-                )
-                .await?;
+                tvix_store::utils::construct_services(service_addrs).await?;
 
-            // Arc PathInfoService and NarCalculationService, as we clone it .
-            let path_info_service: Arc<dyn PathInfoService> = path_info_service.into();
+            // Arc NarCalculationService, as we clone it.
             let nar_calculation_service: Arc<dyn NarCalculationService> =
                 nar_calculation_service.into();
 
@@ -388,18 +277,11 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
             try_join_all(tasks).await?;
         }
         Commands::Copy {
-            blob_service_addr,
-            directory_service_addr,
-            path_info_service_addr,
+            service_addrs,
             reference_graph_path,
         } => {
             let (blob_service, directory_service, path_info_service, _nar_calculation_service) =
-                tvix_store::utils::construct_services(
-                    blob_service_addr,
-                    directory_service_addr,
-                    path_info_service_addr,
-                )
-                .await?;
+                tvix_store::utils::construct_services(service_addrs).await?;
 
             // Parse the file at reference_graph_path.
             let reference_graph_json = tokio::fs::read(&reference_graph_path).await?;
@@ -413,18 +295,26 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
             let reference_graph: ReferenceGraph<'_> =
                 serde_json::from_slice(reference_graph_json.as_slice())?;
 
-            // Arc the PathInfoService, as we clone it .
-            let path_info_service: Arc<dyn PathInfoService> = path_info_service.into();
+            let lookups_span = info_span!(
+                "lookup pathinfos",
+                "indicatif.pb_show" = tracing::field::Empty
+            );
+            lookups_span.pb_set_length(reference_graph.closure.len() as u64);
+            lookups_span.pb_set_style(&tvix_tracing::PB_PROGRESS_STYLE);
+            lookups_span.pb_start();
 
             // From our reference graph, lookup all pathinfos that might exist.
             let elems: Vec<_> = futures::stream::iter(reference_graph.closure)
                 .map(|elem| {
                     let path_info_service = path_info_service.clone();
                     async move {
-                        path_info_service
+                        let resp = path_info_service
                             .get(*elem.path.digest())
                             .await
-                            .map(|resp| (elem, resp))
+                            .map(|resp| (elem, resp));
+
+                        Span::current().pb_inc(1);
+                        resp
                     }
                 })
                 .buffer_unordered(50)
@@ -468,9 +358,10 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
                 // Create and upload a PathInfo pointing to the root_node,
                 // annotated with information we have from the reference graph.
                 let path_info = PathInfo {
-                    node: Some(tvix_castore::proto::Node {
-                        node: Some(root_node),
-                    }),
+                    node: Some(tvix_castore::proto::Node::from_name_and_node(
+                        elem.path.to_string().into(),
+                        root_node,
+                    )),
                     references: Vec::from_iter(
                         elem.references.iter().map(|e| e.digest().to_vec().into()),
                     ),
@@ -492,27 +383,20 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
         #[cfg(feature = "fuse")]
         Commands::Mount {
             dest,
-            blob_service_addr,
-            directory_service_addr,
-            path_info_service_addr,
+            service_addrs,
             list_root,
             threads,
             allow_other,
             show_xattr,
         } => {
             let (blob_service, directory_service, path_info_service, _nar_calculation_service) =
-                tvix_store::utils::construct_services(
-                    blob_service_addr,
-                    directory_service_addr,
-                    path_info_service_addr,
-                )
-                .await?;
+                tvix_store::utils::construct_services(service_addrs).await?;
 
-            let mut fuse_daemon = tokio::task::spawn_blocking(move || {
+            let fuse_daemon = tokio::task::spawn_blocking(move || {
                 let fs = make_fs(
                     blob_service,
                     directory_service,
-                    Arc::from(path_info_service),
+                    path_info_service,
                     list_root,
                     show_xattr,
                 );
@@ -522,39 +406,38 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
             })
             .await??;
 
-            // grab a handle to unmount the file system, and register a signal
-            // handler.
-            tokio::spawn(async move {
-                tokio::signal::ctrl_c().await.unwrap();
-                info!("interrupt received, unmountingโ€ฆ");
-                tokio::task::spawn_blocking(move || fuse_daemon.unmount()).await??;
-                info!("unmount occured, terminatingโ€ฆ");
-                Ok::<_, std::io::Error>(())
-            })
-            .await??;
+            // Wait for a ctrl_c and then call fuse_daemon.unmount().
+            tokio::spawn({
+                let fuse_daemon = fuse_daemon.clone();
+                async move {
+                    tokio::signal::ctrl_c().await.unwrap();
+                    info!("interrupt received, unmountingโ€ฆ");
+                    tokio::task::spawn_blocking(move || fuse_daemon.unmount()).await??;
+                    info!("unmount occured, terminatingโ€ฆ");
+                    Ok::<_, std::io::Error>(())
+                }
+            });
+
+            // Wait for the server to finish, which can happen either by it being
+            // unmounted externally, or by a signal invoking the handler above.
+            tokio::task::spawn_blocking(move || fuse_daemon.wait()).await?
         }
         #[cfg(feature = "virtiofs")]
         Commands::VirtioFs {
             socket,
-            blob_service_addr,
-            directory_service_addr,
-            path_info_service_addr,
+            service_addrs,
             list_root,
             show_xattr,
         } => {
             let (blob_service, directory_service, path_info_service, _nar_calculation_service) =
-                tvix_store::utils::construct_services(
-                    blob_service_addr,
-                    directory_service_addr,
-                    path_info_service_addr,
-                )
-                .await?;
+                tvix_store::utils::construct_services(service_addrs).await?;
 
             tokio::task::spawn_blocking(move || {
                 let fs = make_fs(
                     blob_service,
                     directory_service,
-                    Arc::from(path_info_service),
+                    path_info_service,
                     list_root,
                     show_xattr,
                 );
@@ -567,3 +450,36 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
     };
     Ok(())
 }
+
+#[tokio::main]
+async fn main() -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
+    let cli = Cli::parse();
+
+    let tracing_handle = {
+        let mut builder = tvix_tracing::TracingBuilder::default();
+        builder = builder.level(cli.log_level).enable_progressbar();
+        #[cfg(feature = "otlp")]
+        {
+            if cli.otlp {
+                builder = builder.enable_otlp("tvix.store");
+            }
+        }
+        builder.build()?
+    };
+
+    tokio::select! {
+        res = tokio::signal::ctrl_c() => {
+            res?;
+            if let Err(e) = tracing_handle.force_shutdown().await {
+                eprintln!("failed to shutdown tracing: {e}");
+            }
+            Ok(())
+        },
+        res = run_cli(cli) => {
+            if let Err(e) = tracing_handle.shutdown().await {
+                eprintln!("failed to shutdown tracing: {e}");
+            }
+            res
+        }
+    }
+}
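
The repeated `--blob-service-addr`/`--directory-service-addr`/`--path-info-service-addr` flags are collapsed into one flattened argument struct per subcommand. Below is a minimal sketch of that clap pattern, assuming clap 4 with the `derive` and `env` features; the field set mirrors the removed flags and is an assumption, since the real definitions live in `tvix_store::utils::{ServiceUrls, ServiceUrlsGrpc}`:

    use clap::Parser;

    // Shared service-address flags, spliced into each subcommand via #[clap(flatten)].
    #[derive(clap::Args)]
    struct ServiceUrlsSketch {
        #[arg(long, env, default_value = "grpc+http://[::1]:8000")]
        blob_service_addr: String,
        #[arg(long, env, default_value = "grpc+http://[::1]:8000")]
        directory_service_addr: String,
        #[arg(long, env, default_value = "grpc+http://[::1]:8000")]
        path_info_service_addr: String,
    }

    #[derive(Parser)]
    struct CliSketch {
        #[clap(flatten)]
        service_addrs: ServiceUrlsSketch,
    }

    fn main() {
        let cli = CliSketch::parse();
        println!("{}", cli.service_addrs.blob_service_addr);
    }
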
diff --git a/tvix/store/src/composition.rs b/tvix/store/src/composition.rs
new file mode 100644
index 000000000000..a32f22cf7796
--- /dev/null
+++ b/tvix/store/src/composition.rs
@@ -0,0 +1,22 @@
+use lazy_static::lazy_static;
+
+pub use tvix_castore::composition::*;
+
+lazy_static! {
+    /// The provided registry of tvix_store, which has all the builtin
+    /// tvix_castore (BlobService/DirectoryService) and tvix_store
+    /// (PathInfoService) implementations.
+    pub static ref REG: Registry = {
+        let mut reg = Default::default();
+        add_default_services(&mut reg);
+        reg
+    };
+}
+
+/// Register the builtin services of tvix_castore and tvix_store with the given
+/// registry. This is useful for creating your own registry with the builtin
+/// types _and_ extra third party types.
+pub fn add_default_services(reg: &mut Registry) {
+    tvix_castore::composition::add_default_services(reg);
+    crate::pathinfoservice::register_pathinfo_services(reg);
+}
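
A short sketch of the intended use, assuming only the two items exported above; registering third-party builders is left as a comment because the `Registry` API surface is not shown in this diff:

    use tvix_store::composition::{add_default_services, Registry};

    fn registry_with_extras() -> Registry {
        let mut reg = Registry::default();
        // Builtin BlobService/DirectoryService/PathInfoService builders.
        add_default_services(&mut reg);
        // Third-party ServiceBuilder implementations would be registered here.
        reg
    }
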
diff --git a/tvix/store/src/import.rs b/tvix/store/src/import.rs
index 888380bca9a0..1719669a4285 100644
--- a/tvix/store/src/import.rs
+++ b/tvix/store/src/import.rs
@@ -1,13 +1,14 @@
+use bstr::ByteSlice;
 use std::path::Path;
 use tracing::{debug, instrument};
 use tvix_castore::{
-    blobservice::BlobService, directoryservice::DirectoryService, import::fs::ingest_path,
-    proto::node::Node, B3Digest,
+    blobservice::BlobService, directoryservice::DirectoryService, import::fs::ingest_path, Node,
+    PathComponent,
 };
 
 use nix_compat::{
     nixhash::{CAHash, NixHash},
-    store_path::{self, StorePath},
+    store_path::{self, StorePathRef},
 };
 
 use crate::{
@@ -27,29 +28,29 @@ impl From<CAHash> for nar_info::Ca {
     }
 }
 
-pub fn log_node(node: &Node, path: &Path) {
+pub fn log_node(name: &[u8], node: &Node, path: &Path) {
     match node {
-        Node::Directory(directory_node) => {
+        Node::Directory { digest, .. } => {
             debug!(
                 path = ?path,
-                name = ?directory_node.name,
-                digest = %B3Digest::try_from(directory_node.digest.clone()).unwrap(),
+                name = %name.as_bstr(),
+                digest = %digest,
                 "import successful",
             )
         }
-        Node::File(file_node) => {
+        Node::File { digest, .. } => {
             debug!(
                 path = ?path,
-                name = ?file_node.name,
-                digest = %B3Digest::try_from(file_node.digest.clone()).unwrap(),
+                name = %name.as_bstr(),
+                digest = %digest,
                 "import successful"
             )
         }
-        Node::Symlink(symlink_node) => {
+        Node::Symlink { target } => {
             debug!(
                 path = ?path,
-                name = ?symlink_node.name,
-                target = ?symlink_node.target,
+                name = %name.as_bstr(),
+                target = ?target,
                 "import successful"
             )
         }
@@ -81,14 +82,15 @@ pub fn path_to_name(path: &Path) -> std::io::Result<&str> {
 pub fn derive_nar_ca_path_info(
     nar_size: u64,
     nar_sha256: [u8; 32],
-    ca: Option<CAHash>,
+    ca: Option<&CAHash>,
+    name: bytes::Bytes,
     root_node: Node,
 ) -> PathInfo {
     // assemble the [crate::proto::PathInfo] object.
     PathInfo {
-        node: Some(tvix_castore::proto::Node {
-            node: Some(root_node),
-        }),
+        node: Some(tvix_castore::proto::Node::from_name_and_node(
+            name, root_node,
+        )),
         // There's no reference scanning on path contents ingested like this.
         references: vec![],
         narinfo: Some(NarInfo {
@@ -102,8 +104,9 @@ pub fn derive_nar_ca_path_info(
     }
 }
 
-/// Ingest the given path `path` and register the resulting output path in the
-/// [`PathInfoService`] as a recursive fixed output NAR.
+/// Ingests the contents at the given path `path` into castore, and registers
+/// the resulting root node in the passed PathInfoService, using the "NAR
+/// sha256 digest" and the passed name for output path calculation.
 #[instrument(skip_all, fields(store_name=name, path=?path), err)]
 pub async fn import_path_as_nar_ca<BS, DS, PS, NS, P>(
     path: P,
@@ -112,7 +115,7 @@ pub async fn import_path_as_nar_ca<BS, DS, PS, NS, P>(
     directory_service: DS,
     path_info_service: PS,
     nar_calculation_service: NS,
-) -> Result<StorePath, std::io::Error>
+) -> Result<StorePathRef, std::io::Error>
 where
     P: AsRef<Path> + std::fmt::Debug,
     BS: BlobService + Clone,
@@ -137,23 +140,28 @@ where
         )
     })?;
 
-    // assemble a new root_node with a name that is derived from the nar hash.
-    let root_node = root_node.rename(output_path.to_string().into_bytes().into());
-    log_node(&root_node, path.as_ref());
+    let name: PathComponent = output_path
+        .to_string()
+        .as_str()
+        .try_into()
+        .expect("Tvix bug: StorePath must be PathComponent");
+
+    log_node(name.as_ref(), &root_node, path.as_ref());
 
     let path_info = derive_nar_ca_path_info(
         nar_size,
         nar_sha256,
-        Some(CAHash::Nar(NixHash::Sha256(nar_sha256))),
+        Some(&CAHash::Nar(NixHash::Sha256(nar_sha256))),
+        name.into(),
         root_node,
     );
 
     // This new [`PathInfo`] that we get back from there might contain additional signatures or
     // information set by the service itself. In this function, we silently swallow it because
-    // callers doesn't really need it.
+    // callers don't really need it.
     let _path_info = path_info_service.as_ref().put(path_info).await?;
 
-    Ok(output_path.to_owned())
+    Ok(output_path)
 }
 
 #[cfg(test)]
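
A hedged usage sketch of the updated signature, inside some async context with the four services already constructed; the path and name are placeholders:

    let store_path = import_path_as_nar_ca(
        "/some/local/path",
        "mypkg",
        blob_service.clone(),
        directory_service.clone(),
        path_info_service.clone(),
        nar_calculation_service.clone(),
    )
    .await?;
    // StorePathRef implements Display (the function itself calls .to_string() on it).
    println!("imported as {}", store_path);
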
diff --git a/tvix/store/src/lib.rs b/tvix/store/src/lib.rs
index 8c32aaf885e8..81a77cd978a2 100644
--- a/tvix/store/src/lib.rs
+++ b/tvix/store/src/lib.rs
@@ -1,3 +1,4 @@
+pub mod composition;
 pub mod import;
 pub mod nar;
 pub mod pathinfoservice;
diff --git a/tvix/store/src/nar/import.rs b/tvix/store/src/nar/import.rs
index 36122d419d00..b9a15fe71384 100644
--- a/tvix/store/src/nar/import.rs
+++ b/tvix/store/src/nar/import.rs
@@ -1,5 +1,10 @@
 use nix_compat::nar::reader::r#async as nar_reader;
-use tokio::{io::AsyncBufRead, sync::mpsc, try_join};
+use sha2::Digest;
+use tokio::{
+    io::{AsyncBufRead, AsyncRead},
+    sync::mpsc,
+    try_join,
+};
 use tvix_castore::{
     blobservice::BlobService,
     directoryservice::DirectoryService,
@@ -7,12 +12,45 @@ use tvix_castore::{
         blobs::{self, ConcurrentBlobUploader},
         ingest_entries, IngestionEntry, IngestionError,
     },
-    proto::{node::Node, NamedNode},
-    PathBuf,
+    Node, PathBuf,
 };
 
 /// Ingests the contents from an [AsyncRead] providing NAR into the tvix store,
 /// interacting with a [BlobService] and [DirectoryService].
+/// Returns the castore root node, as well as the sha256 and size of the NAR
+/// contents ingested.
+pub async fn ingest_nar_and_hash<R, BS, DS>(
+    blob_service: BS,
+    directory_service: DS,
+    r: &mut R,
+) -> Result<(Node, [u8; 32], u64), IngestionError<Error>>
+where
+    R: AsyncRead + Unpin + Send,
+    BS: BlobService + Clone + 'static,
+    DS: DirectoryService,
+{
+    let mut nar_hash = sha2::Sha256::new();
+    let mut nar_size = 0;
+
+    // Assemble NarHash and NarSize as we read bytes.
+    let r = tokio_util::io::InspectReader::new(r, |b| {
+        nar_size += b.len() as u64;
+        use std::io::Write;
+        nar_hash.write_all(b).unwrap();
+    });
+
+    // HACK: InspectReader doesn't implement AsyncBufRead.
+    // See if this can be propagated through and we can then require our input
+    // reader to be buffered too.
+    let mut r = tokio::io::BufReader::new(r);
+
+    let root_node = ingest_nar(blob_service, directory_service, &mut r).await?;
+
+    Ok((root_node, nar_hash.finalize().into(), nar_size))
+}
+
+/// Ingests the contents from an [AsyncRead] providing NAR into the tvix store,
+/// interacting with a [BlobService] and [DirectoryService].
 /// It returns the castore root node or an error.
 pub async fn ingest_nar<R, BS, DS>(
     blob_service: BS,
@@ -59,9 +97,7 @@ where
 
     let (_, node) = try_join!(produce, consume)?;
 
-    // remove the fake "root" name again
-    debug_assert_eq!(&node.get_name(), b"root");
-    Ok(node.rename("".into()))
+    Ok(node)
 }
 
 async fn produce_nar_inner<BS>(
@@ -138,7 +174,7 @@ mod test {
         DIRECTORY_COMPLICATED, DIRECTORY_WITH_KEEP, EMPTY_BLOB_DIGEST, HELLOWORLD_BLOB_CONTENTS,
         HELLOWORLD_BLOB_DIGEST,
     };
-    use tvix_castore::proto as castorepb;
+    use tvix_castore::{Directory, Node};
 
     use crate::tests::fixtures::{
         blob_service, directory_service, NAR_CONTENTS_COMPLICATED, NAR_CONTENTS_HELLOWORLD,
@@ -160,10 +196,9 @@ mod test {
         .expect("must parse");
 
         assert_eq!(
-            castorepb::node::Node::Symlink(castorepb::SymlinkNode {
-                name: "".into(), // name must be empty
-                target: "/nix/store/somewhereelse".into(),
-            }),
+            Node::Symlink {
+                target: "/nix/store/somewhereelse".try_into().unwrap()
+            },
             root_node
         );
     }
@@ -183,12 +218,11 @@ mod test {
         .expect("must parse");
 
         assert_eq!(
-            castorepb::node::Node::File(castorepb::FileNode {
-                name: "".into(), // name must be empty
-                digest: HELLOWORLD_BLOB_DIGEST.clone().into(),
+            Node::File {
+                digest: HELLOWORLD_BLOB_DIGEST.clone(),
                 size: HELLOWORLD_BLOB_CONTENTS.len() as u64,
                 executable: false,
-            }),
+            },
             root_node
         );
 
@@ -211,11 +245,10 @@ mod test {
         .expect("must parse");
 
         assert_eq!(
-            castorepb::node::Node::Directory(castorepb::DirectoryNode {
-                name: "".into(), // name must be empty
-                digest: DIRECTORY_COMPLICATED.digest().into(),
-                size: DIRECTORY_COMPLICATED.size(),
-            }),
+            Node::Directory {
+                digest: DIRECTORY_COMPLICATED.digest(),
+                size: DIRECTORY_COMPLICATED.size()
+            },
             root_node,
         );
 
@@ -223,7 +256,7 @@ mod test {
         assert!(blob_service.has(&EMPTY_BLOB_DIGEST).await.unwrap());
 
         // directoryservice must contain the directories, at least with get_recursive.
-        let resp: Result<Vec<castorepb::Directory>, _> = directory_service
+        let resp: Result<Vec<Directory>, _> = directory_service
             .get_recursive(&DIRECTORY_COMPLICATED.digest())
             .collect()
             .await;
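
A sketch of driving the new `ingest_nar_and_hash` helper, assuming an async context and already-constructed blob/directory services; the NAR path is a placeholder:

    let mut file = tokio::fs::File::open("/tmp/example.nar").await?;
    let (root_node, nar_sha256, nar_size) =
        ingest_nar_and_hash(blob_service.clone(), directory_service.clone(), &mut file).await?;
    println!(
        "size={} sha256={} node={:?}",
        nar_size,
        data_encoding::HEXLOWER.encode(&nar_sha256),
        root_node
    );
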
diff --git a/tvix/store/src/nar/mod.rs b/tvix/store/src/nar/mod.rs
index 164748a655e8..da798bbf3a3c 100644
--- a/tvix/store/src/nar/mod.rs
+++ b/tvix/store/src/nar/mod.rs
@@ -4,19 +4,18 @@ use tvix_castore::B3Digest;
 mod import;
 mod renderer;
 pub use import::ingest_nar;
+pub use import::ingest_nar_and_hash;
 pub use renderer::calculate_size_and_sha256;
 pub use renderer::write_nar;
 pub use renderer::SimpleRenderer;
-use tvix_castore::proto as castorepb;
+use tvix_castore::Node;
 
 #[async_trait]
 pub trait NarCalculationService: Send + Sync {
     /// Return the nar size and nar sha256 digest for a given root node.
     /// This can be used to calculate NAR-based output paths.
-    async fn calculate_nar(
-        &self,
-        root_node: &castorepb::node::Node,
-    ) -> Result<(u64, [u8; 32]), tvix_castore::Error>;
+    async fn calculate_nar(&self, root_node: &Node)
+        -> Result<(u64, [u8; 32]), tvix_castore::Error>;
 }
 
 #[async_trait]
@@ -26,7 +25,7 @@ where
 {
     async fn calculate_nar(
         &self,
-        root_node: &castorepb::node::Node,
+        root_node: &Node,
     ) -> Result<(u64, [u8; 32]), tvix_castore::Error> {
         self.as_ref().calculate_nar(root_node).await
     }
@@ -38,13 +37,13 @@ pub enum RenderError {
     #[error("failure talking to a backing store client: {0}")]
     StoreError(#[source] std::io::Error),
 
-    #[error("unable to find directory {}, referred from {:?}", .0, .1)]
+    #[error("unable to find directory {0}, referred from {1:?}")]
     DirectoryNotFound(B3Digest, bytes::Bytes),
 
-    #[error("unable to find blob {}, referred from {:?}", .0, .1)]
+    #[error("unable to find blob {0}, referred from {1:?}")]
     BlobNotFound(B3Digest, bytes::Bytes),
 
-    #[error("unexpected size in metadata for blob {}, referred from {:?} returned, expected {}, got {}", .0, .1, .2, .3)]
+    #[error("unexpected size in metadata for blob {0}, referred from {1:?} returned, expected {2}, got {3}")]
     UnexpectedBlobMeta(B3Digest, bytes::Bytes, u32, u32),
 
     #[error("failure using the NAR writer: {0}")]
diff --git a/tvix/store/src/nar/renderer.rs b/tvix/store/src/nar/renderer.rs
index efd67671db70..afd85f267c6c 100644
--- a/tvix/store/src/nar/renderer.rs
+++ b/tvix/store/src/nar/renderer.rs
@@ -6,11 +6,9 @@ use nix_compat::nar::writer::r#async as nar_writer;
 use sha2::{Digest, Sha256};
 use tokio::io::{self, AsyncWrite, BufReader};
 use tonic::async_trait;
-use tvix_castore::{
-    blobservice::BlobService,
-    directoryservice::DirectoryService,
-    proto::{self as castorepb, NamedNode},
-};
+use tracing::{instrument, Span};
+use tracing_indicatif::span_ext::IndicatifSpanExt;
+use tvix_castore::{blobservice::BlobService, directoryservice::DirectoryService, Node};
 
 pub struct SimpleRenderer<BS, DS> {
     blob_service: BS,
@@ -34,7 +32,7 @@ where
 {
     async fn calculate_nar(
         &self,
-        root_node: &castorepb::node::Node,
+        root_node: &Node,
     ) -> Result<(u64, [u8; 32]), tvix_castore::Error> {
         calculate_size_and_sha256(
             root_node,
@@ -48,8 +46,9 @@ where
 
 /// Invoke [write_nar], and return the size and sha256 digest of the produced
 /// NAR output.
+#[instrument(skip_all, fields(indicatif.pb_show=1))]
 pub async fn calculate_size_and_sha256<BS, DS>(
-    root_node: &castorepb::node::Node,
+    root_node: &Node,
     blob_service: BS,
     directory_service: DS,
 ) -> Result<(u64, [u8; 32]), RenderError>
@@ -60,6 +59,10 @@ where
     let mut h = Sha256::new();
     let mut cw = CountWrite::from(&mut h);
 
+    let span = Span::current();
+    span.pb_set_message("Calculating NAR");
+    span.pb_start();
+
     write_nar(
         // The hasher doesn't speak async. It doesn't
         // actually do any I/O, so it's fine to wrap.
@@ -73,13 +76,13 @@ where
     Ok((cw.count(), h.finalize().into()))
 }
 
-/// Accepts a [castorepb::node::Node] pointing to the root of a (store) path,
+/// Accepts a [Node] pointing to the root of a (store) path,
 /// and uses the passed blob_service and directory_service to perform the
 /// necessary lookups as it traverses the structure.
 /// The contents in NAR serialization are written to the passed [AsyncWrite].
 pub async fn write_nar<W, BS, DS>(
     mut w: W,
-    proto_root_node: &castorepb::node::Node,
+    root_node: &Node,
     blob_service: BS,
     directory_service: DS,
 ) -> Result<(), RenderError>
@@ -95,7 +98,8 @@ where
 
     walk_node(
         nar_root_node,
-        proto_root_node,
+        root_node,
+        b"",
         blob_service,
         directory_service,
     )
@@ -108,7 +112,8 @@ where
 /// This consumes the node.
 async fn walk_node<BS, DS>(
     nar_node: nar_writer::Node<'_, '_>,
-    proto_node: &castorepb::node::Node,
+    castore_node: &Node,
+    name: &[u8],
     blob_service: BS,
     directory_service: DS,
 ) -> Result<(BS, DS), RenderError>
@@ -116,24 +121,20 @@ where
     BS: BlobService + Send,
     DS: DirectoryService + Send,
 {
-    match proto_node {
-        castorepb::node::Node::Symlink(proto_symlink_node) => {
+    match castore_node {
+        Node::Symlink { target, .. } => {
             nar_node
-                .symlink(&proto_symlink_node.target)
+                .symlink(target.as_ref())
                 .await
                 .map_err(RenderError::NARWriterError)?;
         }
-        castorepb::node::Node::File(proto_file_node) => {
-            let digest_len = proto_file_node.digest.len();
-            let digest = proto_file_node.digest.clone().try_into().map_err(|_| {
-                RenderError::StoreError(io::Error::new(
-                    io::ErrorKind::Other,
-                    format!("invalid digest len {} in file node", digest_len),
-                ))
-            })?;
-
+        Node::File {
+            digest,
+            size,
+            executable,
+        } => {
             let mut blob_reader = match blob_service
-                .open_read(&digest)
+                .open_read(digest)
                 .await
                 .map_err(RenderError::StoreError)?
             {
@@ -145,39 +146,23 @@ where
             }?;
 
             nar_node
-                .file(
-                    proto_file_node.executable,
-                    proto_file_node.size,
-                    &mut blob_reader,
-                )
+                .file(*executable, *size, &mut blob_reader)
                 .await
                 .map_err(RenderError::NARWriterError)?;
         }
-        castorepb::node::Node::Directory(proto_directory_node) => {
-            let digest_len = proto_directory_node.digest.len();
-            let digest = proto_directory_node
-                .digest
-                .clone()
-                .try_into()
-                .map_err(|_| {
-                    RenderError::StoreError(io::Error::new(
-                        io::ErrorKind::InvalidData,
-                        format!("invalid digest len {} in directory node", digest_len),
-                    ))
-                })?;
-
+        Node::Directory { digest, .. } => {
             // look it up with the directory service
             match directory_service
-                .get(&digest)
+                .get(digest)
                 .await
                 .map_err(|e| RenderError::StoreError(e.into()))?
             {
                 // if it's None, that's an error!
                 None => Err(RenderError::DirectoryNotFound(
-                    digest,
-                    proto_directory_node.name.clone(),
+                    digest.clone(),
+                    bytes::Bytes::copy_from_slice(name),
                 ))?,
-                Some(proto_directory) => {
+                Some(directory) => {
                     // start a directory node
                     let mut nar_node_directory = nar_node
                         .directory()
@@ -191,15 +176,16 @@ where
 
                     // for each node in the directory, create a new entry with its name,
                     // and then recurse on that entry.
-                    for proto_node in proto_directory.nodes() {
+                    for (name, node) in directory.nodes() {
                         let child_node = nar_node_directory
-                            .entry(proto_node.get_name())
+                            .entry(name.as_ref())
                             .await
                             .map_err(RenderError::NARWriterError)?;
 
                         (blob_service, directory_service) = Box::pin(walk_node(
                             child_node,
-                            &proto_node,
+                            node,
+                            name.as_ref(),
                             blob_service,
                             directory_service,
                         ))
diff --git a/tvix/store/src/pathinfoservice/bigtable.rs b/tvix/store/src/pathinfoservice/bigtable.rs
index 707a686c0a54..15128986ff56 100644
--- a/tvix/store/src/pathinfoservice/bigtable.rs
+++ b/tvix/store/src/pathinfoservice/bigtable.rs
@@ -10,15 +10,17 @@ use nix_compat::nixbase32;
 use prost::Message;
 use serde::{Deserialize, Serialize};
 use serde_with::{serde_as, DurationSeconds};
+use std::sync::Arc;
 use tonic::async_trait;
 use tracing::{instrument, trace};
+use tvix_castore::composition::{CompositionContext, ServiceBuilder};
 use tvix_castore::Error;
 
 /// There should not be more than 10 MiB in a single cell.
 /// https://cloud.google.com/bigtable/docs/schema-design#cells
 const CELL_SIZE_LIMIT: u64 = 10 * 1024 * 1024;
 
-/// Provides a [DirectoryService] implementation using
+/// Provides a [PathInfoService] implementation using
 /// [Bigtable](https://cloud.google.com/bigtable/docs/)
 /// as an underlying K/V store.
 ///
@@ -44,57 +46,6 @@ pub struct BigtablePathInfoService {
     emulator: std::sync::Arc<(tempfile::TempDir, async_process::Child)>,
 }
 
-/// Represents configuration of [BigtablePathInfoService].
-/// This currently conflates both connect parameters and data model/client
-/// behaviour parameters.
-#[serde_as]
-#[derive(Clone, Debug, PartialEq, Deserialize, Serialize)]
-pub struct BigtableParameters {
-    project_id: String,
-    instance_name: String,
-    #[serde(default)]
-    is_read_only: bool,
-    #[serde(default = "default_channel_size")]
-    channel_size: usize,
-
-    #[serde_as(as = "Option<DurationSeconds<String>>")]
-    #[serde(default = "default_timeout")]
-    timeout: Option<std::time::Duration>,
-    table_name: String,
-    family_name: String,
-
-    #[serde(default = "default_app_profile_id")]
-    app_profile_id: String,
-}
-
-impl BigtableParameters {
-    #[cfg(test)]
-    pub fn default_for_tests() -> Self {
-        Self {
-            project_id: "project-1".into(),
-            instance_name: "instance-1".into(),
-            is_read_only: false,
-            channel_size: default_channel_size(),
-            timeout: default_timeout(),
-            table_name: "table-1".into(),
-            family_name: "cf1".into(),
-            app_profile_id: default_app_profile_id(),
-        }
-    }
-}
-
-fn default_app_profile_id() -> String {
-    "default".to_owned()
-}
-
-fn default_channel_size() -> usize {
-    4
-}
-
-fn default_timeout() -> Option<std::time::Duration> {
-    Some(std::time::Duration::from_secs(4))
-}
-
 impl BigtablePathInfoService {
     #[cfg(not(test))]
     pub async fn connect(params: BigtableParameters) -> Result<Self, bigtable::Error> {
@@ -398,7 +349,9 @@ impl PathInfoService for BigtablePathInfoService {
                     .validate()
                     .map_err(|e| Error::StorageError(format!("invalid PathInfo: {}", e)))?;
 
-                if store_path.digest().as_slice() != row_key.as_slice() {
+                let exp_path_info_key = derive_pathinfo_key(store_path.digest());
+
+                if exp_path_info_key.as_bytes() != row_key.as_slice() {
                     Err(Error::StorageError("PathInfo has unexpected digest".into()))?
                 }
 
@@ -410,3 +363,88 @@ impl PathInfoService for BigtablePathInfoService {
         Box::pin(stream)
     }
 }
+
+/// Represents configuration of [BigtablePathInfoService].
+/// This currently conflates both connect parameters and data model/client
+/// behaviour parameters.
+#[serde_as]
+#[derive(Clone, Debug, PartialEq, Deserialize, Serialize)]
+pub struct BigtableParameters {
+    project_id: String,
+    instance_name: String,
+    #[serde(default)]
+    is_read_only: bool,
+    #[serde(default = "default_channel_size")]
+    channel_size: usize,
+
+    #[serde_as(as = "Option<DurationSeconds<String>>")]
+    #[serde(default = "default_timeout")]
+    timeout: Option<std::time::Duration>,
+    table_name: String,
+    family_name: String,
+
+    #[serde(default = "default_app_profile_id")]
+    app_profile_id: String,
+}
+
+impl BigtableParameters {
+    #[cfg(test)]
+    pub fn default_for_tests() -> Self {
+        Self {
+            project_id: "project-1".into(),
+            instance_name: "instance-1".into(),
+            is_read_only: false,
+            channel_size: default_channel_size(),
+            timeout: default_timeout(),
+            table_name: "table-1".into(),
+            family_name: "cf1".into(),
+            app_profile_id: default_app_profile_id(),
+        }
+    }
+}
+
+fn default_app_profile_id() -> String {
+    "default".to_owned()
+}
+
+fn default_channel_size() -> usize {
+    4
+}
+
+fn default_timeout() -> Option<std::time::Duration> {
+    Some(std::time::Duration::from_secs(4))
+}
+
+#[async_trait]
+impl ServiceBuilder for BigtableParameters {
+    type Output = dyn PathInfoService;
+    async fn build<'a>(
+        &'a self,
+        _instance_name: &str,
+        _context: &CompositionContext,
+    ) -> Result<Arc<dyn PathInfoService>, Box<dyn std::error::Error + Send + Sync>> {
+        Ok(Arc::new(
+            BigtablePathInfoService::connect(self.clone()).await?,
+        ))
+    }
+}
+
+impl TryFrom<url::Url> for BigtableParameters {
+    type Error = Box<dyn std::error::Error + Send + Sync>;
+    fn try_from(mut url: url::Url) -> Result<Self, Self::Error> {
+        // parse the instance name from the hostname.
+        let instance_name = url
+            .host_str()
+            .ok_or_else(|| Error::StorageError("instance name missing".into()))?
+            .to_string();
+
+        // … but add it to the query string now, so we just need to parse that.
+        url.query_pairs_mut()
+            .append_pair("instance_name", &instance_name);
+
+        let params: BigtableParameters = serde_qs::from_str(url.query().unwrap_or_default())
+            .map_err(|e| Error::InvalidRequest(format!("failed to parse parameters: {}", e)))?;
+
+        Ok(params)
+    }
+}
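
A hedged example of the URL shape this `TryFrom` handles; the `bigtable://` scheme and the parameter values are assumptions for illustration:

    let url = url::Url::parse(
        "bigtable://instance-1?project_id=project-1&table_name=table-1&family_name=cf1",
    )?;
    // The host becomes instance_name; the remaining fields come from the query string.
    let params = BigtableParameters::try_from(url)?;
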
diff --git a/tvix/store/src/pathinfoservice/combinators.rs b/tvix/store/src/pathinfoservice/combinators.rs
index 664144ef494b..bb5595f72b10 100644
--- a/tvix/store/src/pathinfoservice/combinators.rs
+++ b/tvix/store/src/pathinfoservice/combinators.rs
@@ -1,8 +1,11 @@
+use std::sync::Arc;
+
 use crate::proto::PathInfo;
 use futures::stream::BoxStream;
 use nix_compat::nixbase32;
 use tonic::async_trait;
 use tracing::{debug, instrument};
+use tvix_castore::composition::{CompositionContext, ServiceBuilder};
 use tvix_castore::Error;
 
 use super::PathInfoService;
@@ -61,6 +64,41 @@ where
     }
 }
 
+#[derive(serde::Deserialize)]
+pub struct CacheConfig {
+    pub near: String,
+    pub far: String,
+}
+
+impl TryFrom<url::Url> for CacheConfig {
+    type Error = Box<dyn std::error::Error + Send + Sync>;
+    fn try_from(_url: url::Url) -> Result<Self, Self::Error> {
+        Err(Error::StorageError(
+            "Instantiating a CombinedPathInfoService from a url is not supported".into(),
+        )
+        .into())
+    }
+}
+
+#[async_trait]
+impl ServiceBuilder for CacheConfig {
+    type Output = dyn PathInfoService;
+    async fn build<'a>(
+        &'a self,
+        _instance_name: &str,
+        context: &CompositionContext,
+    ) -> Result<Arc<dyn PathInfoService>, Box<dyn std::error::Error + Send + Sync + 'static>> {
+        let (near, far) = futures::join!(
+            context.resolve(self.near.clone()),
+            context.resolve(self.far.clone())
+        );
+        Ok(Arc::new(Cache {
+            near: near?,
+            far: far?,
+        }))
+    }
+}
+
 #[cfg(test)]
 mod test {
     use std::num::NonZeroUsize;
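
Because the `TryFrom<url::Url>` above deliberately errors, a `Cache` can only be built through composition. A sketch of deserializing its config, with the instance names and exact nesting as assumptions:

    let cfg: CacheConfig = toml::from_str(
        r#"
            near = "memory-1"
            far = "grpc-remote"
        "#,
    )?;
    assert_eq!(cfg.near, "memory-1");
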
diff --git a/tvix/store/src/pathinfoservice/from_addr.rs b/tvix/store/src/pathinfoservice/from_addr.rs
index 455909e7f235..d4719219b996 100644
--- a/tvix/store/src/pathinfoservice/from_addr.rs
+++ b/tvix/store/src/pathinfoservice/from_addr.rs
@@ -1,13 +1,10 @@
-use crate::proto::path_info_service_client::PathInfoServiceClient;
+use super::PathInfoService;
 
-use super::{
-    GRPCPathInfoService, MemoryPathInfoService, NixHTTPPathInfoService, PathInfoService,
-    SledPathInfoService,
+use crate::composition::{
+    with_registry, CompositionContext, DeserializeWithRegistry, ServiceBuilder, REG,
 };
-
-use nix_compat::narinfo;
 use std::sync::Arc;
-use tvix_castore::{blobservice::BlobService, directoryservice::DirectoryService, Error};
+use tvix_castore::Error;
 use url::Url;
 
 /// Constructs a new instance of a [PathInfoService] from an URI.
@@ -20,6 +17,11 @@ use url::Url;
 /// - `sled:///absolute/path/to/somewhere`
 ///   Uses sled, using a path on the disk for persistency. Can be only opened
 ///   from one process at the same time.
+/// - `redb:`
+///   Uses an in-memory redb implementation.
+/// - `redb:///absolute/path/to/somewhere`
+///   Uses redb, using a path on the disk for persistence. Can only be opened
+///   from one process at a time.
 /// - `nix+https://cache.nixos.org?trusted-public-keys=cache.nixos.org-1:6NCHdD59X431o0gWypbMrAURkbJ16ZPMQFGspcDShjY=`
 ///   Exposes the Nix binary cache as a PathInfoService, ingesting NARs into the
 ///   {Blob,Directory}Service. You almost certainly want to use this with some cache.
@@ -34,110 +36,21 @@ use url::Url;
 /// these also need to be passed in.
 pub async fn from_addr(
     uri: &str,
-    blob_service: Arc<dyn BlobService>,
-    directory_service: Arc<dyn DirectoryService>,
-) -> Result<Box<dyn PathInfoService>, Error> {
+    context: Option<&CompositionContext<'_>>,
+) -> Result<Arc<dyn PathInfoService>, Box<dyn std::error::Error + Send + Sync>> {
     #[allow(unused_mut)]
     let mut url =
         Url::parse(uri).map_err(|e| Error::StorageError(format!("unable to parse url: {}", e)))?;
 
-    let path_info_service: Box<dyn PathInfoService> = match url.scheme() {
-        "memory" => {
-            // memory doesn't support host or path in the URL.
-            if url.has_host() || !url.path().is_empty() {
-                return Err(Error::StorageError("invalid url".to_string()));
-            }
-            Box::<MemoryPathInfoService>::default()
-        }
-        "sled" => {
-            // sled doesn't support host, and a path can be provided (otherwise
-            // it'll live in memory only).
-            if url.has_host() {
-                return Err(Error::StorageError("no host allowed".to_string()));
-            }
-
-            if url.path() == "/" {
-                return Err(Error::StorageError(
-                    "cowardly refusing to open / with sled".to_string(),
-                ));
-            }
-
-            // TODO: expose other parameters as URL parameters?
-
-            Box::new(if url.path().is_empty() {
-                SledPathInfoService::new_temporary()
-                    .map_err(|e| Error::StorageError(e.to_string()))?
-            } else {
-                SledPathInfoService::new(url.path())
-                    .map_err(|e| Error::StorageError(e.to_string()))?
-            })
-        }
-        "nix+http" | "nix+https" => {
-            // Stringify the URL and remove the nix+ prefix.
-            // We can't use `url.set_scheme(rest)`, as it disallows
-            // setting something http(s) that previously wasn't.
-            let new_url = Url::parse(url.to_string().strip_prefix("nix+").unwrap()).unwrap();
-
-            let mut nix_http_path_info_service =
-                NixHTTPPathInfoService::new(new_url, blob_service, directory_service);
-
-            let pairs = &url.query_pairs();
-            for (k, v) in pairs.into_iter() {
-                if k == "trusted-public-keys" {
-                    let pubkey_strs: Vec<_> = v.split_ascii_whitespace().collect();
-
-                    let mut pubkeys: Vec<narinfo::PubKey> = Vec::with_capacity(pubkey_strs.len());
-                    for pubkey_str in pubkey_strs {
-                        pubkeys.push(narinfo::PubKey::parse(pubkey_str).map_err(|e| {
-                            Error::StorageError(format!("invalid public key: {e}"))
-                        })?);
-                    }
-
-                    nix_http_path_info_service.set_public_keys(pubkeys);
-                }
-            }
-
-            Box::new(nix_http_path_info_service)
-        }
-        scheme if scheme.starts_with("grpc+") => {
-            // schemes starting with grpc+ go to the GRPCPathInfoService.
-            //   That's normally grpc+unix for unix sockets, and grpc+http(s) for the HTTP counterparts.
-            // - In the case of unix sockets, there must be a path, but may not be a host.
-            // - In the case of non-unix sockets, there must be a host, but no path.
-            // Constructing the channel is handled by tvix_castore::channel::from_url.
-            let client =
-                PathInfoServiceClient::new(tvix_castore::tonic::channel_from_url(&url).await?);
-            Box::new(GRPCPathInfoService::from_client(client))
-        }
-        #[cfg(feature = "cloud")]
-        "bigtable" => {
-            use super::bigtable::BigtableParameters;
-            use super::BigtablePathInfoService;
-
-            // parse the instance name from the hostname.
-            let instance_name = url
-                .host_str()
-                .ok_or_else(|| Error::StorageError("instance name missing".into()))?
-                .to_string();
-
-            // โ€ฆ but add it to the query string now, so we just need to parse that.
-            url.query_pairs_mut()
-                .append_pair("instance_name", &instance_name);
-
-            let params: BigtableParameters = serde_qs::from_str(url.query().unwrap_or_default())
-                .map_err(|e| Error::InvalidRequest(format!("failed to parse parameters: {}", e)))?;
-
-            Box::new(
-                BigtablePathInfoService::connect(params)
-                    .await
-                    .map_err(|e| Error::StorageError(e.to_string()))?,
-            )
-        }
-        _ => Err(Error::StorageError(format!(
-            "unknown scheme: {}",
-            url.scheme()
-        )))?,
-    };
+    let path_info_service_config = with_registry(&REG, || {
+        <DeserializeWithRegistry<Box<dyn ServiceBuilder<Output = dyn PathInfoService>>>>::try_from(
+            url,
+        )
+    })?
+    .0;
+    let path_info_service = path_info_service_config
+        .build("anonymous", context.unwrap_or(&CompositionContext::blank()))
+        .await?;
 
     Ok(path_info_service)
 }
@@ -145,18 +58,18 @@ pub async fn from_addr(
 #[cfg(test)]
 mod tests {
     use super::from_addr;
+    use crate::composition::{Composition, DeserializeWithRegistry, ServiceBuilder};
     use lazy_static::lazy_static;
     use rstest::rstest;
-    use std::sync::Arc;
     use tempfile::TempDir;
-    use tvix_castore::{
-        blobservice::{BlobService, MemoryBlobService},
-        directoryservice::{DirectoryService, MemoryDirectoryService},
-    };
+    use tvix_castore::blobservice::{BlobService, MemoryBlobServiceConfig};
+    use tvix_castore::directoryservice::{DirectoryService, MemoryDirectoryServiceConfig};
 
     lazy_static! {
         static ref TMPDIR_SLED_1: TempDir = TempDir::new().unwrap();
         static ref TMPDIR_SLED_2: TempDir = TempDir::new().unwrap();
+        static ref TMPDIR_REDB_1: TempDir = TempDir::new().unwrap();
+        static ref TMPDIR_REDB_2: TempDir = TempDir::new().unwrap();
     }
 
     // the gRPC tests below don't fail, because we connect lazily.
@@ -182,6 +95,14 @@ mod tests {
     #[case::memory_invalid_root_path("memory:///", false)]
     /// This sets a memory url path to "/foo", which is invalid.
     #[case::memory_invalid_root_path_foo("memory:///foo", false)]
+    /// redb with a host and a valid path, which should fail.
+    #[case::redb_invalid_host_with_valid_path(&format!("redb://foo.example{}", &TMPDIR_REDB_1.path().join("bar").to_str().unwrap()), false)]
+    /// redb with / as path, which should fail.
+    #[case::redb_invalid_root("redb:///", false)]
+    /// This configures redb with a valid path, which should succeed.
+    #[case::redb_valid_path(&format!("redb://{}", &TMPDIR_REDB_2.path().join("foo").to_str().unwrap()), true)]
+    /// redb using the in-memory backend, which should succeed.
+    #[case::redb_valid_in_memory("redb://", true)]
     /// Correct Scheme for the cache.nixos.org binary cache.
     #[case::correct_nix_https("nix+https://cache.nixos.org", true)]
     /// Correct Scheme for the cache.nixos.org binary cache (HTTP URL).
@@ -221,11 +142,19 @@ mod tests {
     )]
     #[tokio::test]
     async fn test_from_addr_tokio(#[case] uri_str: &str, #[case] exp_succeed: bool) {
-        let blob_service: Arc<dyn BlobService> = Arc::from(MemoryBlobService::default());
-        let directory_service: Arc<dyn DirectoryService> =
-            Arc::from(MemoryDirectoryService::default());
-
-        let resp = from_addr(uri_str, blob_service, directory_service).await;
+        let mut comp = Composition::default();
+        comp.extend(vec![(
+            "default".into(),
+            DeserializeWithRegistry(Box::new(MemoryBlobServiceConfig {})
+                as Box<dyn ServiceBuilder<Output = dyn BlobService>>),
+        )]);
+        comp.extend(vec![(
+            "default".into(),
+            DeserializeWithRegistry(Box::new(MemoryDirectoryServiceConfig {})
+                as Box<dyn ServiceBuilder<Output = dyn DirectoryService>>),
+        )]);
+
+        let resp = from_addr(uri_str, Some(&comp.context())).await;
 
         if exp_succeed {
             resp.expect("should succeed");
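
With the new signature, callers of from_addr no longer pass blob and directory services directly; schemes that need them (such as nix+http) resolve them from a composition context instead. A minimal sketch of a call site, modeled on the test above (the memory-backed configs and the instance name "default" are the ones used there):

    use crate::composition::{Composition, DeserializeWithRegistry, ServiceBuilder};
    use tvix_castore::blobservice::{BlobService, MemoryBlobServiceConfig};
    use tvix_castore::directoryservice::{DirectoryService, MemoryDirectoryServiceConfig};

    async fn sketch() -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
        let mut comp = Composition::default();
        comp.extend(vec![(
            "default".into(),
            DeserializeWithRegistry(Box::new(MemoryBlobServiceConfig {})
                as Box<dyn ServiceBuilder<Output = dyn BlobService>>),
        )]);
        comp.extend(vec![(
            "default".into(),
            DeserializeWithRegistry(Box::new(MemoryDirectoryServiceConfig {})
                as Box<dyn ServiceBuilder<Output = dyn DirectoryService>>),
        )]);
        // Schemes that don't resolve other services also accept None as context.
        let _svc = crate::pathinfoservice::from_addr("memory://", Some(&comp.context())).await?;
        Ok(())
    }
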
diff --git a/tvix/store/src/pathinfoservice/fs/mod.rs b/tvix/store/src/pathinfoservice/fs/mod.rs
index aa64b1c01f16..1f7fa8a8afce 100644
--- a/tvix/store/src/pathinfoservice/fs/mod.rs
+++ b/tvix/store/src/pathinfoservice/fs/mod.rs
@@ -1,10 +1,10 @@
 use futures::stream::BoxStream;
 use futures::StreamExt;
+use nix_compat::store_path::StorePathRef;
 use tonic::async_trait;
 use tvix_castore::fs::{RootNodes, TvixStoreFs};
-use tvix_castore::proto as castorepb;
-use tvix_castore::Error;
 use tvix_castore::{blobservice::BlobService, directoryservice::DirectoryService};
+use tvix_castore::{Error, Node, PathComponent};
 
 use super::PathInfoService;
 
@@ -48,8 +48,8 @@ impl<T> RootNodes for RootNodesWrapper<T>
 where
     T: AsRef<dyn PathInfoService> + Send + Sync,
 {
-    async fn get_by_basename(&self, name: &[u8]) -> Result<Option<castorepb::node::Node>, Error> {
-        let Ok(store_path) = nix_compat::store_path::StorePath::from_bytes(name) else {
+    async fn get_by_basename(&self, name: &PathComponent) -> Result<Option<Node>, Error> {
+        let Ok(store_path) = StorePathRef::from_bytes(name.as_ref()) else {
             return Ok(None);
         };
 
@@ -59,22 +59,31 @@ where
             .get(*store_path.digest())
             .await?
             .map(|path_info| {
-                path_info
+                let node = path_info
                     .node
+                    .as_ref()
                     .expect("missing root node")
-                    .node
-                    .expect("empty node")
-            }))
+                    .to_owned();
+
+                match node.into_name_and_node() {
+                    Ok((_name, node)) => Ok(node),
+                    Err(e) => Err(Error::StorageError(e.to_string())),
+                }
+            })
+            .transpose()?)
     }
 
-    fn list(&self) -> BoxStream<Result<castorepb::node::Node, Error>> {
+    fn list(&self) -> BoxStream<Result<(PathComponent, Node), Error>> {
         Box::pin(self.0.as_ref().list().map(|result| {
-            result.map(|path_info| {
-                path_info
+            result.and_then(|path_info| {
+                let node = path_info
                     .node
+                    .as_ref()
                     .expect("missing root node")
-                    .node
-                    .expect("empty node")
+                    .to_owned();
+
+                node.into_name_and_node()
+                    .map_err(|e| Error::StorageError(e.to_string()))
             })
         }))
     }
diff --git a/tvix/store/src/pathinfoservice/grpc.rs b/tvix/store/src/pathinfoservice/grpc.rs
index f6a356cf180a..7510ccd911f0 100644
--- a/tvix/store/src/pathinfoservice/grpc.rs
+++ b/tvix/store/src/pathinfoservice/grpc.rs
@@ -6,30 +6,40 @@ use crate::{
 use async_stream::try_stream;
 use futures::stream::BoxStream;
 use nix_compat::nixbase32;
-use tonic::{async_trait, transport::Channel, Code};
-use tracing::instrument;
-use tvix_castore::{proto as castorepb, Error};
+use std::sync::Arc;
+use tonic::{async_trait, Code};
+use tracing::{instrument, Span};
+use tracing_indicatif::span_ext::IndicatifSpanExt;
+use tvix_castore::composition::{CompositionContext, ServiceBuilder};
+use tvix_castore::Error;
+use tvix_castore::Node;
 
 /// Connects to a (remote) tvix-store PathInfoService over gRPC.
 #[derive(Clone)]
-pub struct GRPCPathInfoService {
+pub struct GRPCPathInfoService<T> {
     /// The internal reference to a gRPC client.
     /// Cloning it is cheap, and it internally handles concurrent requests.
-    grpc_client: proto::path_info_service_client::PathInfoServiceClient<Channel>,
+    grpc_client: proto::path_info_service_client::PathInfoServiceClient<T>,
 }
 
-impl GRPCPathInfoService {
+impl<T> GRPCPathInfoService<T> {
     /// construct a [GRPCPathInfoService] from a [proto::path_info_service_client::PathInfoServiceClient].
     /// panics if called outside the context of a tokio runtime.
     pub fn from_client(
-        grpc_client: proto::path_info_service_client::PathInfoServiceClient<Channel>,
+        grpc_client: proto::path_info_service_client::PathInfoServiceClient<T>,
     ) -> Self {
         Self { grpc_client }
     }
 }
 
 #[async_trait]
-impl PathInfoService for GRPCPathInfoService {
+impl<T> PathInfoService for GRPCPathInfoService<T>
+where
+    T: tonic::client::GrpcService<tonic::body::BoxBody> + Send + Sync + Clone + 'static,
+    T::ResponseBody: tonic::codegen::Body<Data = tonic::codegen::Bytes> + Send + 'static,
+    <T::ResponseBody as tonic::codegen::Body>::Error: Into<tonic::codegen::StdError> + Send,
+    T::Future: Send,
+{
     #[instrument(level = "trace", skip_all, fields(path_info.digest = nixbase32::encode(&digest)))]
     async fn get(&self, digest: [u8; 20]) -> Result<Option<PathInfo>, Error> {
         let path_info = self
@@ -106,18 +116,26 @@ impl PathInfoService for GRPCPathInfoService {
 }
 
 #[async_trait]
-impl NarCalculationService for GRPCPathInfoService {
-    #[instrument(level = "trace", skip_all, fields(root_node = ?root_node))]
-    async fn calculate_nar(
-        &self,
-        root_node: &castorepb::node::Node,
-    ) -> Result<(u64, [u8; 32]), Error> {
+impl<T> NarCalculationService for GRPCPathInfoService<T>
+where
+    T: tonic::client::GrpcService<tonic::body::BoxBody> + Send + Sync + Clone + 'static,
+    T::ResponseBody: tonic::codegen::Body<Data = tonic::codegen::Bytes> + Send + 'static,
+    <T::ResponseBody as tonic::codegen::Body>::Error: Into<tonic::codegen::StdError> + Send,
+    T::Future: Send,
+{
+    #[instrument(level = "trace", skip_all, fields(root_node = ?root_node, indicatif.pb_show=1))]
+    async fn calculate_nar(&self, root_node: &Node) -> Result<(u64, [u8; 32]), Error> {
+        let span = Span::current();
+        span.pb_set_message("Waiting for NAR calculation");
+        span.pb_start();
+
         let path_info = self
             .grpc_client
             .clone()
-            .calculate_nar(castorepb::Node {
-                node: Some(root_node.clone()),
-            })
+            .calculate_nar(tvix_castore::proto::Node::from_name_and_node(
+                "".into(),
+                root_node.to_owned(),
+            ))
             .await
             .map_err(|e| Error::StorageError(e.to_string()))?
             .into_inner();
@@ -132,6 +150,40 @@ impl NarCalculationService for GRPCPathInfoService {
     }
 }
 
+#[derive(serde::Deserialize, Debug)]
+#[serde(deny_unknown_fields)]
+pub struct GRPCPathInfoServiceConfig {
+    url: String,
+}
+
+impl TryFrom<url::Url> for GRPCPathInfoServiceConfig {
+    type Error = Box<dyn std::error::Error + Send + Sync>;
+    fn try_from(url: url::Url) -> Result<Self, Self::Error> {
+        // The URL is normally grpc+unix for unix sockets, and grpc+http(s) for the HTTP counterparts.
+        // - In the case of unix sockets, there must be a path, but may not be a host.
+        // - In the case of non-unix sockets, there must be a host, but no path.
+        // Constructing the channel is handled by tvix_castore::tonic::channel_from_url.
+        Ok(GRPCPathInfoServiceConfig {
+            url: url.to_string(),
+        })
+    }
+}
+
+#[async_trait]
+impl ServiceBuilder for GRPCPathInfoServiceConfig {
+    type Output = dyn PathInfoService;
+    async fn build<'a>(
+        &'a self,
+        _instance_name: &str,
+        _context: &CompositionContext,
+    ) -> Result<Arc<dyn PathInfoService>, Box<dyn std::error::Error + Send + Sync + 'static>> {
+        let client = proto::path_info_service_client::PathInfoServiceClient::new(
+            tvix_castore::tonic::channel_from_url(&self.url.parse()?).await?,
+        );
+        Ok(Arc::new(GRPCPathInfoService::from_client(client)))
+    }
+}
+
 #[cfg(test)]
 mod tests {
     use crate::pathinfoservice::tests::make_grpc_path_info_service_client;
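
Since the client is now generic over the transport, tests can substitute a duplex stream (see tests/utils.rs further down) while production code keeps tonic::transport::Channel. A minimal sketch of the production path, following the same steps as GRPCPathInfoServiceConfig::build above (the address is illustrative):

    let url: url::Url = "grpc+http://[::1]:8000".parse()?;
    let channel = tvix_castore::tonic::channel_from_url(&url).await?;
    let client = proto::path_info_service_client::PathInfoServiceClient::new(channel);
    let svc = GRPCPathInfoService::from_client(client);
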
diff --git a/tvix/store/src/pathinfoservice/lru.rs b/tvix/store/src/pathinfoservice/lru.rs
index da674f497ad6..695c04636089 100644
--- a/tvix/store/src/pathinfoservice/lru.rs
+++ b/tvix/store/src/pathinfoservice/lru.rs
@@ -9,6 +9,7 @@ use tonic::async_trait;
 use tracing::instrument;
 
 use crate::proto::PathInfo;
+use tvix_castore::composition::{CompositionContext, ServiceBuilder};
 use tvix_castore::Error;
 
 use super::PathInfoService;
@@ -60,6 +61,34 @@ impl PathInfoService for LruPathInfoService {
     }
 }
 
+#[derive(serde::Deserialize, Debug)]
+#[serde(deny_unknown_fields)]
+pub struct LruPathInfoServiceConfig {
+    capacity: NonZeroUsize,
+}
+
+impl TryFrom<url::Url> for LruPathInfoServiceConfig {
+    type Error = Box<dyn std::error::Error + Send + Sync>;
+    fn try_from(_url: url::Url) -> Result<Self, Self::Error> {
+        Err(Error::StorageError(
+            "Instantiating a LruPathInfoService from a url is not supported".into(),
+        )
+        .into())
+    }
+}
+
+#[async_trait]
+impl ServiceBuilder for LruPathInfoServiceConfig {
+    type Output = dyn PathInfoService;
+    async fn build<'a>(
+        &'a self,
+        _instance_name: &str,
+        _context: &CompositionContext,
+    ) -> Result<Arc<dyn PathInfoService>, Box<dyn std::error::Error + Send + Sync + 'static>> {
+        Ok(Arc::new(LruPathInfoService::with_capacity(self.capacity)))
+    }
+}
+
 #[cfg(test)]
 mod test {
     use std::num::NonZeroUsize;
@@ -79,8 +108,17 @@ mod test {
             let mut p = PATHINFO_1.clone();
             let root_node = p.node.as_mut().unwrap();
             if let castorepb::Node { node: Some(node) } = root_node {
-                let n = node.to_owned();
-                *node = n.rename("11111111111111111111111111111111-dummy2".into());
+                match node {
+                    castorepb::node::Node::Directory(n) => {
+                        n.name = "11111111111111111111111111111111-dummy2".into()
+                    }
+                    castorepb::node::Node::File(n) => {
+                        n.name = "11111111111111111111111111111111-dummy2".into()
+                    }
+                    castorepb::node::Node::Symlink(n) => {
+                        n.name = "11111111111111111111111111111111-dummy2".into()
+                    }
+                }
             } else {
                 unreachable!()
             }
diff --git a/tvix/store/src/pathinfoservice/memory.rs b/tvix/store/src/pathinfoservice/memory.rs
index 3de3221df27e..3fabd239c7b1 100644
--- a/tvix/store/src/pathinfoservice/memory.rs
+++ b/tvix/store/src/pathinfoservice/memory.rs
@@ -7,6 +7,7 @@ use std::{collections::HashMap, sync::Arc};
 use tokio::sync::RwLock;
 use tonic::async_trait;
 use tracing::instrument;
+use tvix_castore::composition::{CompositionContext, ServiceBuilder};
 use tvix_castore::Error;
 
 #[derive(Default)]
@@ -59,3 +60,30 @@ impl PathInfoService for MemoryPathInfoService {
         })
     }
 }
+
+#[derive(serde::Deserialize, Debug)]
+#[serde(deny_unknown_fields)]
+pub struct MemoryPathInfoServiceConfig {}
+
+impl TryFrom<url::Url> for MemoryPathInfoServiceConfig {
+    type Error = Box<dyn std::error::Error + Send + Sync>;
+    fn try_from(url: url::Url) -> Result<Self, Self::Error> {
+        // memory doesn't support host or path in the URL.
+        if url.has_host() || !url.path().is_empty() {
+            return Err(Error::StorageError("invalid url".to_string()).into());
+        }
+        Ok(MemoryPathInfoServiceConfig {})
+    }
+}
+
+#[async_trait]
+impl ServiceBuilder for MemoryPathInfoServiceConfig {
+    type Output = dyn PathInfoService;
+    async fn build<'a>(
+        &'a self,
+        _instance_name: &str,
+        _context: &CompositionContext,
+    ) -> Result<Arc<dyn PathInfoService>, Box<dyn std::error::Error + Send + Sync + 'static>> {
+        Ok(Arc::new(MemoryPathInfoService::default()))
+    }
+}
diff --git a/tvix/store/src/pathinfoservice/mod.rs b/tvix/store/src/pathinfoservice/mod.rs
index 574bcc0b8b88..d118a8af1e73 100644
--- a/tvix/store/src/pathinfoservice/mod.rs
+++ b/tvix/store/src/pathinfoservice/mod.rs
@@ -4,6 +4,7 @@ mod grpc;
 mod lru;
 mod memory;
 mod nix_http;
+mod redb;
 mod sled;
 
 #[cfg(any(feature = "fuse", feature = "virtiofs"))]
@@ -14,22 +15,27 @@ mod tests;
 
 use futures::stream::BoxStream;
 use tonic::async_trait;
+use tvix_castore::composition::{Registry, ServiceBuilder};
 use tvix_castore::Error;
 
+use crate::nar::NarCalculationService;
 use crate::proto::PathInfo;
 
-pub use self::combinators::Cache as CachePathInfoService;
+pub use self::combinators::{
+    Cache as CachePathInfoService, CacheConfig as CachePathInfoServiceConfig,
+};
 pub use self::from_addr::from_addr;
-pub use self::grpc::GRPCPathInfoService;
-pub use self::lru::LruPathInfoService;
-pub use self::memory::MemoryPathInfoService;
-pub use self::nix_http::NixHTTPPathInfoService;
-pub use self::sled::SledPathInfoService;
+pub use self::grpc::{GRPCPathInfoService, GRPCPathInfoServiceConfig};
+pub use self::lru::{LruPathInfoService, LruPathInfoServiceConfig};
+pub use self::memory::{MemoryPathInfoService, MemoryPathInfoServiceConfig};
+pub use self::nix_http::{NixHTTPPathInfoService, NixHTTPPathInfoServiceConfig};
+pub use self::redb::{RedbPathInfoService, RedbPathInfoServiceConfig};
+pub use self::sled::{SledPathInfoService, SledPathInfoServiceConfig};
 
 #[cfg(feature = "cloud")]
 mod bigtable;
 #[cfg(feature = "cloud")]
-pub use self::bigtable::BigtablePathInfoService;
+pub use self::bigtable::{BigtableParameters, BigtablePathInfoService};
 
 #[cfg(any(feature = "fuse", feature = "virtiofs"))]
 pub use self::fs::make_fs;
@@ -52,12 +58,16 @@ pub trait PathInfoService: Send + Sync {
     /// Rust doesn't support this as a generic in traits yet. This is the same thing that
     /// [async_trait] generates, but for streams instead of futures.
     fn list(&self) -> BoxStream<'static, Result<PathInfo, Error>>;
+
+    fn nar_calculation_service(&self) -> Option<Box<dyn NarCalculationService>> {
+        None
+    }
 }
 
 #[async_trait]
 impl<A> PathInfoService for A
 where
-    A: AsRef<dyn PathInfoService> + Send + Sync,
+    A: AsRef<dyn PathInfoService> + Send + Sync + 'static,
 {
     async fn get(&self, digest: [u8; 20]) -> Result<Option<PathInfo>, Error> {
         self.as_ref().get(digest).await
@@ -71,3 +81,20 @@ where
         self.as_ref().list()
     }
 }
+
+/// Registers the builtin PathInfoService implementations with the registry
+pub(crate) fn register_pathinfo_services(reg: &mut Registry) {
+    reg.register::<Box<dyn ServiceBuilder<Output = dyn PathInfoService>>, CachePathInfoServiceConfig>("cache");
+    reg.register::<Box<dyn ServiceBuilder<Output = dyn PathInfoService>>, GRPCPathInfoServiceConfig>("grpc");
+    reg.register::<Box<dyn ServiceBuilder<Output = dyn PathInfoService>>, LruPathInfoServiceConfig>("lru");
+    reg.register::<Box<dyn ServiceBuilder<Output = dyn PathInfoService>>, MemoryPathInfoServiceConfig>("memory");
+    reg.register::<Box<dyn ServiceBuilder<Output = dyn PathInfoService>>, NixHTTPPathInfoServiceConfig>("nix");
+    reg.register::<Box<dyn ServiceBuilder<Output = dyn PathInfoService>>, SledPathInfoServiceConfig>("sled");
+    reg.register::<Box<dyn ServiceBuilder<Output = dyn PathInfoService>>, RedbPathInfoServiceConfig>("redb");
+    #[cfg(feature = "cloud")]
+    {
+        reg.register::<Box<dyn ServiceBuilder<Output = dyn PathInfoService>>, BigtableParameters>(
+            "bigtable",
+        );
+    }
+}
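
register_pathinfo_services is what makes these config types reachable from URLs and composition files via DeserializeWithRegistry. A third-party implementation would hook in the same way; a hedged sketch, where MyPathInfoServiceConfig is hypothetical and must implement ServiceBuilder (plus TryFrom<url::Url> to be usable via from_addr):

    // Hypothetical downstream registration, mirroring the calls above:
    reg.register::<Box<dyn ServiceBuilder<Output = dyn PathInfoService>>, MyPathInfoServiceConfig>(
        "myscheme",
    );
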
diff --git a/tvix/store/src/pathinfoservice/nix_http.rs b/tvix/store/src/pathinfoservice/nix_http.rs
index cccd4805c6ca..5f1eed1a0a9f 100644
--- a/tvix/store/src/pathinfoservice/nix_http.rs
+++ b/tvix/store/src/pathinfoservice/nix_http.rs
@@ -1,3 +1,5 @@
+use super::PathInfoService;
+use crate::{nar::ingest_nar_and_hash, proto::PathInfo};
 use futures::{stream::BoxStream, TryStreamExt};
 use nix_compat::{
     narinfo::{self, NarInfo},
@@ -5,19 +7,15 @@ use nix_compat::{
     nixhash::NixHash,
 };
 use reqwest::StatusCode;
-use sha2::Digest;
-use std::io::{self, Write};
-use tokio::io::{AsyncRead, BufReader};
-use tokio_util::io::InspectReader;
+use std::sync::Arc;
+use tokio::io::{self, AsyncRead};
 use tonic::async_trait;
 use tracing::{debug, instrument, warn};
+use tvix_castore::composition::{CompositionContext, ServiceBuilder};
 use tvix_castore::{
     blobservice::BlobService, directoryservice::DirectoryService, proto as castorepb, Error,
 };
-
-use crate::proto::PathInfo;
-
-use super::PathInfoService;
+use url::Url;
 
 /// NixHTTPPathInfoService acts as a bridge in between the Nix HTTP Binary cache
 /// protocol provided by Nix binary caches such as cache.nixos.org, and the Tvix
@@ -36,21 +34,23 @@ use super::PathInfoService;
 /// TODO: what about reading from nix-cache-info?
 pub struct NixHTTPPathInfoService<BS, DS> {
     base_url: url::Url,
-    http_client: reqwest::Client,
+    http_client: reqwest_middleware::ClientWithMiddleware,
 
     blob_service: BS,
     directory_service: DS,
 
     /// An optional list of [narinfo::PubKey].
     /// If set, the .narinfo files received need to have correct signature by at least one of these.
-    public_keys: Option<Vec<narinfo::PubKey>>,
+    public_keys: Option<Vec<narinfo::VerifyingKey>>,
 }
 
 impl<BS, DS> NixHTTPPathInfoService<BS, DS> {
     pub fn new(base_url: url::Url, blob_service: BS, directory_service: DS) -> Self {
         Self {
             base_url,
-            http_client: reqwest::Client::new(),
+            http_client: reqwest_middleware::ClientBuilder::new(reqwest::Client::new())
+                .with(tvix_tracing::propagate::reqwest::tracing_middleware())
+                .build(),
             blob_service,
             directory_service,
 
@@ -59,7 +59,7 @@ impl<BS, DS> NixHTTPPathInfoService<BS, DS> {
     }
 
     /// Configures [Self] to validate NARInfo fingerprints with the public keys passed.
-    pub fn set_public_keys(&mut self, public_keys: Vec<narinfo::PubKey>) {
+    pub fn set_public_keys(&mut self, public_keys: Vec<narinfo::VerifyingKey>) {
         self.public_keys = Some(public_keys);
     }
 }
@@ -178,9 +178,9 @@ where
         }));
 
         // handle decompression, depending on the compression field.
-        let r: Box<dyn AsyncRead + Send + Unpin> = match narinfo.compression {
-            Some("none") => Box::new(r) as Box<dyn AsyncRead + Send + Unpin>,
-            Some("bzip2") | None => Box::new(async_compression::tokio::bufread::BzDecoder::new(r))
+        let mut r: Box<dyn AsyncRead + Send + Unpin> = match narinfo.compression {
+            None => Box::new(r) as Box<dyn AsyncRead + Send + Unpin>,
+            Some("bzip2") => Box::new(async_compression::tokio::bufread::BzDecoder::new(r))
                 as Box<dyn AsyncRead + Send + Unpin>,
             Some("gzip") => Box::new(async_compression::tokio::bufread::GzipDecoder::new(r))
                 as Box<dyn AsyncRead + Send + Unpin>,
@@ -194,19 +194,8 @@ where
                 )));
             }
         };
-        let mut nar_hash = sha2::Sha256::new();
-        let mut nar_size = 0;
-
-        // Assemble NarHash and NarSize as we read bytes.
-        let r = InspectReader::new(r, |b| {
-            nar_size += b.len() as u64;
-            nar_hash.write_all(b).unwrap();
-        });
-
-        // HACK: InspectReader doesn't implement AsyncBufRead, but neither do our decompressors.
-        let mut r = BufReader::new(r);
 
-        let root_node = crate::nar::ingest_nar(
+        let (root_node, nar_hash, nar_size) = ingest_nar_and_hash(
             self.blob_service.clone(),
             self.directory_service.clone(),
             &mut r,
@@ -226,7 +215,6 @@ where
                 "NarSize mismatch".to_string(),
             ))?;
         }
-        let nar_hash: [u8; 32] = nar_hash.finalize().into();
         if narinfo.nar_hash != nar_hash {
             warn!(
                 narinfo.nar_hash = %NixHash::Sha256(narinfo.nar_hash),
@@ -240,10 +228,10 @@ where
         }
 
         Ok(Some(PathInfo {
-            node: Some(castorepb::Node {
-                // set the name of the root node to the digest-name of the store path.
-                node: Some(root_node.rename(narinfo.store_path.to_string().to_owned().into())),
-            }),
+            node: Some(castorepb::Node::from_name_and_node(
+                narinfo.store_path.to_string().into(),
+                root_node,
+            )),
             references: pathinfo.references,
             narinfo: pathinfo.narinfo,
         }))
@@ -264,3 +252,71 @@ where
         }))
     }
 }
+
+#[derive(serde::Deserialize)]
+pub struct NixHTTPPathInfoServiceConfig {
+    base_url: String,
+    blob_service: String,
+    directory_service: String,
+    #[serde(default)]
+    /// An optional list of [narinfo::VerifyingKey].
+    /// If set, the .narinfo files received need to have a correct signature by at least one of these.
+    public_keys: Option<Vec<String>>,
+}
+
+impl TryFrom<Url> for NixHTTPPathInfoServiceConfig {
+    type Error = Box<dyn std::error::Error + Send + Sync>;
+    fn try_from(url: Url) -> Result<Self, Self::Error> {
+        let mut public_keys: Option<Vec<String>> = None;
+        for (_, v) in url
+            .query_pairs()
+            .into_iter()
+            .filter(|(k, _)| k == "trusted-public-keys")
+        {
+            public_keys
+                .get_or_insert(Default::default())
+                .extend(v.split_ascii_whitespace().map(ToString::to_string));
+        }
+        Ok(NixHTTPPathInfoServiceConfig {
+            // Stringify the URL and remove the nix+ prefix.
+            // We can't use `url.set_scheme()`, as it disallows
+            // changing a scheme to http(s) if it previously wasn't http(s).
+            base_url: url.to_string().strip_prefix("nix+").unwrap().to_string(),
+            blob_service: "default".to_string(),
+            directory_service: "default".to_string(),
+            public_keys,
+        })
+    }
+}
+
+#[async_trait]
+impl ServiceBuilder for NixHTTPPathInfoServiceConfig {
+    type Output = dyn PathInfoService;
+    async fn build<'a>(
+        &'a self,
+        _instance_name: &str,
+        context: &CompositionContext,
+    ) -> Result<Arc<dyn PathInfoService>, Box<dyn std::error::Error + Send + Sync + 'static>> {
+        let (blob_service, directory_service) = futures::join!(
+            context.resolve(self.blob_service.clone()),
+            context.resolve(self.directory_service.clone())
+        );
+        let mut svc = NixHTTPPathInfoService::new(
+            Url::parse(&self.base_url)?,
+            blob_service?,
+            directory_service?,
+        );
+        if let Some(public_keys) = &self.public_keys {
+            svc.set_public_keys(
+                public_keys
+                    .iter()
+                    .map(|pubkey_str| {
+                        narinfo::VerifyingKey::parse(pubkey_str)
+                            .map_err(|e| Error::StorageError(format!("invalid public key: {e}")))
+                    })
+                    .collect::<Result<Vec<_>, Error>>()?,
+            );
+        }
+        Ok(Arc::new(svc))
+    }
+}
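
When built from a URL, the nix+ prefix is stripped off for base_url, and every trusted-public-keys query parameter is split on whitespace into public_keys. A sketch using the cache.nixos.org address from the from_addr docs (field access assumes in-module scope):

    let url = url::Url::parse(
        "nix+https://cache.nixos.org?trusted-public-keys=cache.nixos.org-1:6NCHdD59X431o0gWypbMrAURkbJ16ZPMQFGspcDShjY=",
    )?;
    let cfg = NixHTTPPathInfoServiceConfig::try_from(url)?;
    assert!(cfg.base_url.starts_with("https://cache.nixos.org"));
    assert_eq!(cfg.public_keys.map(|ks| ks.len()), Some(1));
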
diff --git a/tvix/store/src/pathinfoservice/redb.rs b/tvix/store/src/pathinfoservice/redb.rs
new file mode 100644
index 000000000000..bd0e0fc2b686
--- /dev/null
+++ b/tvix/store/src/pathinfoservice/redb.rs
@@ -0,0 +1,218 @@
+use super::PathInfoService;
+use crate::proto::PathInfo;
+use data_encoding::BASE64;
+use futures::{stream::BoxStream, StreamExt};
+use prost::Message;
+use redb::{Database, ReadableTable, TableDefinition};
+use std::{path::PathBuf, sync::Arc};
+use tokio_stream::wrappers::ReceiverStream;
+use tonic::async_trait;
+use tracing::{instrument, warn};
+use tvix_castore::{
+    composition::{CompositionContext, ServiceBuilder},
+    Error,
+};
+
+const PATHINFO_TABLE: TableDefinition<[u8; 20], Vec<u8>> = TableDefinition::new("pathinfo");
+
+/// PathInfoService implementation using redb under the hood.
+/// redb stores all of its data in a single file with a K/V pointing from a path's output hash to
+/// its corresponding protobuf-encoded PathInfo.
+pub struct RedbPathInfoService {
+    // We wrap db in an Arc to be able to move it into spawn_blocking,
+    // as discussed in https://github.com/cberner/redb/issues/789
+    db: Arc<Database>,
+}
+
+impl RedbPathInfoService {
+    /// Constructs a new instance using the specified file system path for
+    /// storage.
+    pub async fn new(path: PathBuf) -> Result<Self, Error> {
+        if path == PathBuf::from("/") {
+            return Err(Error::StorageError(
+                "cowardly refusing to open / with redb".to_string(),
+            ));
+        }
+
+        let db = tokio::task::spawn_blocking(|| -> Result<_, redb::Error> {
+            let db = redb::Database::create(path)?;
+            create_schema(&db)?;
+            Ok(db)
+        })
+        .await??;
+
+        Ok(Self { db: Arc::new(db) })
+    }
+
+    /// Constructs a new instance using the in-memory backend.
+    pub fn new_temporary() -> Result<Self, Error> {
+        let db =
+            redb::Database::builder().create_with_backend(redb::backends::InMemoryBackend::new())?;
+
+        create_schema(&db)?;
+
+        Ok(Self { db: Arc::new(db) })
+    }
+}
+
+/// Ensures all tables are present.
+/// Opens a write transaction and calls open_table on PATHINFO_TABLE, which will
+/// create it if not present.
+fn create_schema(db: &redb::Database) -> Result<(), redb::Error> {
+    let txn = db.begin_write()?;
+    txn.open_table(PATHINFO_TABLE)?;
+    txn.commit()?;
+
+    Ok(())
+}
+
+#[async_trait]
+impl PathInfoService for RedbPathInfoService {
+    #[instrument(level = "trace", skip_all, fields(path_info.digest = BASE64.encode(&digest)))]
+    async fn get(&self, digest: [u8; 20]) -> Result<Option<PathInfo>, Error> {
+        let db = self.db.clone();
+
+        tokio::task::spawn_blocking({
+            move || {
+                let txn = db.begin_read()?;
+                let table = txn.open_table(PATHINFO_TABLE)?;
+                match table.get(digest)? {
+                    Some(pathinfo_bytes) => Ok(Some(
+                        PathInfo::decode(pathinfo_bytes.value().as_slice()).map_err(|e| {
+                            warn!(err=%e, "failed to decode stored PathInfo");
+                            Error::StorageError("failed to decode stored PathInfo".to_string())
+                        })?,
+                    )),
+                    None => Ok(None),
+                }
+            }
+        })
+        .await?
+    }
+
+    #[instrument(level = "trace", skip_all, fields(path_info.root_node = ?path_info.node))]
+    async fn put(&self, path_info: PathInfo) -> Result<PathInfo, Error> {
+        // Call validate on the received PathInfo message.
+        let store_path = path_info
+            .validate()
+            .map_err(|e| {
+                warn!(err=%e, "failed to validate PathInfo");
+                Error::StorageError("failed to validate PathInfo".to_string())
+            })?
+            .to_owned();
+
+        let path_info_encoded = path_info.encode_to_vec();
+        let db = self.db.clone();
+
+        tokio::task::spawn_blocking({
+            move || -> Result<(), Error> {
+                let txn = db.begin_write()?;
+                {
+                    let mut table = txn.open_table(PATHINFO_TABLE)?;
+                    table
+                        .insert(store_path.digest(), path_info_encoded)
+                        .map_err(|e| {
+                            warn!(err=%e, "failed to insert PathInfo");
+                            Error::StorageError("failed to insert PathInfo".to_string())
+                        })?;
+                }
+                Ok(txn.commit()?)
+            }
+        })
+        .await??;
+
+        Ok(path_info)
+    }
+
+    fn list(&self) -> BoxStream<'static, Result<PathInfo, Error>> {
+        let db = self.db.clone();
+        let (tx, rx) = tokio::sync::mpsc::channel(50);
+
+        // Spawn a blocking task which writes all PathInfos to tx.
+        tokio::task::spawn_blocking({
+            move || -> Result<(), Error> {
+                let read_txn = db.begin_read()?;
+                let table = read_txn.open_table(PATHINFO_TABLE)?;
+
+                for elem in table.iter()? {
+                    let elem = elem?;
+                    tokio::runtime::Handle::current()
+                        .block_on(tx.send(Ok(
+                            PathInfo::decode(elem.1.value().as_slice()).map_err(|e| {
+                                warn!(err=%e, "invalid PathInfo");
+                                Error::StorageError("invalid PathInfo".to_string())
+                            })?,
+                        )))
+                        .map_err(|e| Error::StorageError(e.to_string()))?;
+                }
+
+                Ok(())
+            }
+        });
+
+        ReceiverStream::from(rx).boxed()
+    }
+}
+
+#[derive(serde::Deserialize)]
+#[serde(deny_unknown_fields)]
+pub struct RedbPathInfoServiceConfig {
+    is_temporary: bool,
+    #[serde(default)]
+    /// Required when `is_temporary` is false.
+    path: Option<PathBuf>,
+}
+
+impl TryFrom<url::Url> for RedbPathInfoServiceConfig {
+    type Error = Box<dyn std::error::Error + Send + Sync>;
+    fn try_from(url: url::Url) -> Result<Self, Self::Error> {
+        // redb doesn't support host, and a path can be provided (otherwise it'll live in memory only)
+        if url.has_host() {
+            return Err(Error::StorageError("no host allowed".to_string()).into());
+        }
+
+        Ok(if url.path().is_empty() {
+            RedbPathInfoServiceConfig {
+                is_temporary: true,
+                path: None,
+            }
+        } else {
+            RedbPathInfoServiceConfig {
+                is_temporary: false,
+                path: Some(url.path().into()),
+            }
+        })
+    }
+}
+
+#[async_trait]
+impl ServiceBuilder for RedbPathInfoServiceConfig {
+    type Output = dyn PathInfoService;
+    async fn build<'a>(
+        &'a self,
+        _instance_name: &str,
+        _context: &CompositionContext,
+    ) -> Result<Arc<dyn PathInfoService>, Box<dyn std::error::Error + Send + Sync + 'static>> {
+        match self {
+            RedbPathInfoServiceConfig {
+                is_temporary: true,
+                path: None,
+            } => Ok(Arc::new(RedbPathInfoService::new_temporary()?)),
+            RedbPathInfoServiceConfig {
+                is_temporary: true,
+                path: Some(_),
+            } => Err(
+                Error::StorageError("Temporary RedbPathInfoService can not have path".into())
+                    .into(),
+            ),
+            RedbPathInfoServiceConfig {
+                is_temporary: false,
+                path: None,
+            } => Err(Error::StorageError("RedbPathInfoService is missing path".into()).into()),
+            RedbPathInfoServiceConfig {
+                is_temporary: false,
+                path: Some(path),
+            } => Ok(Arc::new(RedbPathInfoService::new(path.to_owned()).await?)),
+        }
+    }
+}
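
The accepted URL forms mirror sled's: a bare `redb://` selects the in-memory backend, while an absolute path selects an on-disk database. A small sketch (field access assumes in-module scope; the path is illustrative):

    let tmp = RedbPathInfoServiceConfig::try_from(url::Url::parse("redb://")?)?;
    assert!(tmp.is_temporary && tmp.path.is_none());

    let disk = RedbPathInfoServiceConfig::try_from(
        url::Url::parse("redb:///var/lib/tvix/pathinfo.redb")?,
    )?;
    assert!(!disk.is_temporary && disk.path.is_some());
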
diff --git a/tvix/store/src/pathinfoservice/sled.rs b/tvix/store/src/pathinfoservice/sled.rs
index eb3cf2ff1b88..837eb9d079e1 100644
--- a/tvix/store/src/pathinfoservice/sled.rs
+++ b/tvix/store/src/pathinfoservice/sled.rs
@@ -5,9 +5,10 @@ use futures::stream::BoxStream;
 use nix_compat::nixbase32;
 use prost::Message;
 use std::path::Path;
+use std::sync::Arc;
 use tonic::async_trait;
-use tracing::instrument;
-use tracing::warn;
+use tracing::{instrument, warn};
+use tvix_castore::composition::{CompositionContext, ServiceBuilder};
 use tvix_castore::Error;
 
 /// SledPathInfoService stores PathInfo in a [sled](https://github.com/spacejam/sled).
@@ -20,6 +21,12 @@ pub struct SledPathInfoService {
 
 impl SledPathInfoService {
     pub fn new<P: AsRef<Path>>(p: P) -> Result<Self, sled::Error> {
+        if p.as_ref() == Path::new("/") {
+            return Err(sled::Error::Unsupported(
+                "cowardly refusing to open / with sled".to_string(),
+            ));
+        }
+
         let config = sled::Config::default()
             .use_compression(false) // is a required parameter
             .path(p);
@@ -115,3 +122,69 @@ impl PathInfoService for SledPathInfoService {
         })
     }
 }
+
+#[derive(serde::Deserialize)]
+#[serde(deny_unknown_fields)]
+pub struct SledPathInfoServiceConfig {
+    is_temporary: bool,
+    #[serde(default)]
+    /// Required when `is_temporary` is false.
+    path: Option<String>,
+}
+
+impl TryFrom<url::Url> for SledPathInfoServiceConfig {
+    type Error = Box<dyn std::error::Error + Send + Sync>;
+    fn try_from(url: url::Url) -> Result<Self, Self::Error> {
+        // sled doesn't support host, and a path can be provided (otherwise
+        // it'll live in memory only).
+        if url.has_host() {
+            return Err(Error::StorageError("no host allowed".to_string()).into());
+        }
+
+        // TODO: expose compression and other parameters as URL parameters?
+
+        Ok(if url.path().is_empty() {
+            SledPathInfoServiceConfig {
+                is_temporary: true,
+                path: None,
+            }
+        } else {
+            SledPathInfoServiceConfig {
+                is_temporary: false,
+                path: Some(url.path().to_string()),
+            }
+        })
+    }
+}
+
+#[async_trait]
+impl ServiceBuilder for SledPathInfoServiceConfig {
+    type Output = dyn PathInfoService;
+    async fn build<'a>(
+        &'a self,
+        _instance_name: &str,
+        _context: &CompositionContext,
+    ) -> Result<Arc<dyn PathInfoService>, Box<dyn std::error::Error + Send + Sync + 'static>> {
+        match self {
+            SledPathInfoServiceConfig {
+                is_temporary: true,
+                path: None,
+            } => Ok(Arc::new(SledPathInfoService::new_temporary()?)),
+            SledPathInfoServiceConfig {
+                is_temporary: true,
+                path: Some(_),
+            } => Err(
+                Error::StorageError("Temporary SledPathInfoService can not have path".into())
+                    .into(),
+            ),
+            SledPathInfoServiceConfig {
+                is_temporary: false,
+                path: None,
+            } => Err(Error::StorageError("SledPathInfoService is missing path".into()).into()),
+            SledPathInfoServiceConfig {
+                is_temporary: false,
+                path: Some(path),
+            } => Ok(Arc::new(SledPathInfoService::new(path)?)),
+        }
+    }
+}
diff --git a/tvix/store/src/pathinfoservice/tests/mod.rs b/tvix/store/src/pathinfoservice/tests/mod.rs
index 061655e4bafc..777588e9beda 100644
--- a/tvix/store/src/pathinfoservice/tests/mod.rs
+++ b/tvix/store/src/pathinfoservice/tests/mod.rs
@@ -2,10 +2,12 @@
 //! We use [rstest] and [rstest_reuse] to provide all services we want to test
 //! against, and then apply this template to all test functions.
 
+use futures::TryStreamExt;
 use rstest::*;
 use rstest_reuse::{self, *};
 
 use super::PathInfoService;
+use crate::pathinfoservice::redb::RedbPathInfoService;
 use crate::pathinfoservice::MemoryPathInfoService;
 use crate::pathinfoservice::SledPathInfoService;
 use crate::proto::PathInfo;
@@ -26,6 +28,7 @@ use self::utils::make_bigtable_path_info_service;
     svc
 })]
 #[case::sled(SledPathInfoService::new_temporary().unwrap())]
+#[case::redb(RedbPathInfoService::new_temporary().unwrap())]
 #[cfg_attr(all(feature = "cloud",feature="integration"), case::bigtable(make_bigtable_path_info_service().await))]
 pub fn path_info_services(#[case] svc: impl PathInfoService) {}
 
@@ -68,5 +71,12 @@ async fn put_get(svc: impl PathInfoService) {
     // get it back
     let resp = svc.get(DUMMY_PATH_DIGEST).await.expect("must succeed");
 
-    assert_eq!(Some(path_info), resp);
+    assert_eq!(Some(path_info.clone()), resp);
+
+    // Ensure the listing endpoint works, and returns the same path_info.
+    // FUTUREWORK: split this, some impls might (rightfully) not support listing
+    let pathinfos: Vec<PathInfo> = svc.list().try_collect().await.expect("must succeed");
+
+    // We should get a single pathinfo back, the one we inserted.
+    assert_eq!(vec![path_info], pathinfos);
 }
diff --git a/tvix/store/src/pathinfoservice/tests/utils.rs b/tvix/store/src/pathinfoservice/tests/utils.rs
index ee170468d1d2..8b192e303b89 100644
--- a/tvix/store/src/pathinfoservice/tests/utils.rs
+++ b/tvix/store/src/pathinfoservice/tests/utils.rs
@@ -1,5 +1,6 @@
 use std::sync::Arc;
 
+use hyper_util::rt::TokioIo;
 use tonic::transport::{Endpoint, Server, Uri};
 use tvix_castore::{blobservice::BlobService, directoryservice::DirectoryService};
 
@@ -16,8 +17,11 @@ use crate::{
 /// Constructs and returns a gRPC PathInfoService.
 /// We also return memory-based {Blob,Directory}Service,
 /// as the consumer of this function accepts a 3-tuple.
-pub async fn make_grpc_path_info_service_client(
-) -> (impl BlobService, impl DirectoryService, GRPCPathInfoService) {
+pub async fn make_grpc_path_info_service_client() -> (
+    impl BlobService,
+    impl DirectoryService,
+    GRPCPathInfoService<tonic::transport::Channel>,
+) {
     let (left, right) = tokio::io::duplex(64);
 
     let blob_service = blob_service();
@@ -54,7 +58,7 @@ pub async fn make_grpc_path_info_service_client(
             .unwrap()
             .connect_with_connector(tower::service_fn(move |_: Uri| {
                 let right = maybe_right.take().unwrap();
-                async move { Ok::<_, std::io::Error>(right) }
+                async move { Ok::<_, std::io::Error>(TokioIo::new(right)) }
             }))
             .await
             .unwrap(),
diff --git a/tvix/store/src/proto/grpc_pathinfoservice_wrapper.rs b/tvix/store/src/proto/grpc_pathinfoservice_wrapper.rs
index 68f557567629..60da73012df7 100644
--- a/tvix/store/src/proto/grpc_pathinfoservice_wrapper.rs
+++ b/tvix/store/src/proto/grpc_pathinfoservice_wrapper.rs
@@ -74,24 +74,19 @@ where
         &self,
         request: Request<castorepb::Node>,
     ) -> Result<Response<proto::CalculateNarResponse>> {
-        match request.into_inner().node {
-            None => Err(Status::invalid_argument("no root node sent")),
-            Some(root_node) => {
-                if let Err(e) = root_node.validate() {
-                    warn!(err = %e, "invalid root node");
-                    Err(Status::invalid_argument("invalid root node"))?
-                }
+        let (_, root_node) = request.into_inner().into_name_and_node().map_err(|e| {
+            warn!(err = %e, "invalid root node");
+            Status::invalid_argument("invalid root node")
+        })?;
 
-                match self.nar_calculation_service.calculate_nar(&root_node).await {
-                    Ok((nar_size, nar_sha256)) => Ok(Response::new(proto::CalculateNarResponse {
-                        nar_size,
-                        nar_sha256: nar_sha256.to_vec().into(),
-                    })),
-                    Err(e) => {
-                        warn!(err = %e, "error during NAR calculation");
-                        Err(e.into())
-                    }
-                }
+        match self.nar_calculation_service.calculate_nar(&root_node).await {
+            Ok((nar_size, nar_sha256)) => Ok(Response::new(proto::CalculateNarResponse {
+                nar_size,
+                nar_sha256: nar_sha256.to_vec().into(),
+            })),
+            Err(e) => {
+                warn!(err = %e, "error during NAR calculation");
+                Err(e.into())
             }
         }
     }
diff --git a/tvix/store/src/proto/mod.rs b/tvix/store/src/proto/mod.rs
index a09839c8bdf9..f3ea4b196946 100644
--- a/tvix/store/src/proto/mod.rs
+++ b/tvix/store/src/proto/mod.rs
@@ -9,7 +9,7 @@ use nix_compat::{
     store_path::{self, StorePathRef},
 };
 use thiserror::Error;
-use tvix_castore::proto::{self as castorepb, NamedNode, ValidateNodeError};
+use tvix_castore::DirectoryError;
 
 mod grpc_pathinfoservice_wrapper;
 
@@ -39,7 +39,7 @@ pub enum ValidatePathInfoError {
 
     /// Node fails validation
     #[error("Invalid root node: {:?}", .0.to_string())]
-    InvalidRootNode(ValidateNodeError),
+    InvalidRootNode(DirectoryError),
 
     /// Invalid node name encountered. Root nodes in PathInfos have more strict name requirements
     #[error("Failed to parse {} as StorePath: {1}", .0.to_str_lossy())]
@@ -87,7 +87,7 @@ impl PathInfo {
     /// validate performs some checks on the PathInfo struct,
     /// Returning either a [store_path::StorePath] of the root node, or a
     /// [ValidatePathInfoError].
-    pub fn validate(&self) -> Result<store_path::StorePathRef<'_>, ValidatePathInfoError> {
+    pub fn validate(&self) -> Result<store_path::StorePath<String>, ValidatePathInfoError> {
         // ensure the references have the right number of bytes.
         for (i, reference) in self.references.iter().enumerate() {
             if reference.len() != store_path::DIGEST_SIZE {
@@ -118,7 +118,7 @@ impl PathInfo {
             // parse references in reference_names.
             for (i, reference_name_str) in narinfo.reference_names.iter().enumerate() {
                 // ensure thy parse as (non-absolute) store path
-                let reference_names_store_path = store_path::StorePath::from_bytes(
+                let reference_names_store_path = store_path::StorePathRef::from_bytes(
                     reference_name_str.as_bytes(),
                 )
                 .map_err(|_| {
@@ -158,14 +158,20 @@ impl PathInfo {
 
         // Ensure there is a (root) node present, and it properly parses to a [store_path::StorePath].
         let root_nix_path = match &self.node {
-            None | Some(castorepb::Node { node: None }) => {
-                Err(ValidatePathInfoError::NoNodePresent)?
-            }
-            Some(castorepb::Node { node: Some(node) }) => {
-                node.validate()
+            None => Err(ValidatePathInfoError::NoNodePresent)?,
+            Some(node) => {
+                // NOTE: with a non-allocating PathComponent this could
+                // return a StorePathRef instead.
+                // However, as this will get refactored away to stricter types
+                // soon anyway, there's no point.
+                let (name, _node) = node
+                    .clone()
+                    .into_name_and_node()
                     .map_err(ValidatePathInfoError::InvalidRootNode)?;
+
                 // parse the name of the node itself and return
-                parse_node_name_root(node.get_name(), ValidatePathInfoError::InvalidNodeName)?
+                parse_node_name_root(name.as_ref(), ValidatePathInfoError::InvalidNodeName)?
+                    .to_owned()
             }
         };
 
@@ -219,7 +225,7 @@ impl PathInfo {
                 .signatures
                 .iter()
                 .map(|sig| {
-                    nix_compat::narinfo::Signature::new(
+                    nix_compat::narinfo::SignatureRef::new(
                         &sig.name,
                         // This shouldn't pass validation
                         sig.data[..].try_into().expect("invalid signature len"),
@@ -260,24 +266,19 @@ impl TryFrom<&nar_info::Ca> for nix_compat::nixhash::CAHash {
 
     fn try_from(value: &nar_info::Ca) -> Result<Self, Self::Error> {
         Ok(match value.r#type {
-            typ if typ == nar_info::ca::Hash::FlatMd5 as i32 => {
-                Self::Flat(NixHash::Md5(value.digest[..].try_into().map_err(|_| {
-                    ConvertCAError::InvalidReferenceDigestLen(value.digest.len(), "FlatMd5")
-                })?))
-            }
-            typ if typ == nar_info::ca::Hash::FlatSha1 as i32 => {
-                Self::Flat(NixHash::Sha1(value.digest[..].try_into().map_err(
-                    |_| ConvertCAError::InvalidReferenceDigestLen(value.digest.len(), "FlatSha1"),
+            typ if typ == nar_info::ca::Hash::NarSha256 as i32 => {
+                Self::Nar(NixHash::Sha256(value.digest[..].try_into().map_err(
+                    |_| ConvertCAError::InvalidReferenceDigestLen(value.digest.len(), "NarSha256"),
                 )?))
             }
-            typ if typ == nar_info::ca::Hash::FlatSha256 as i32 => {
-                Self::Flat(NixHash::Sha256(value.digest[..].try_into().map_err(
-                    |_| ConvertCAError::InvalidReferenceDigestLen(value.digest.len(), "FlatSha256"),
+            typ if typ == nar_info::ca::Hash::NarSha1 as i32 => {
+                Self::Nar(NixHash::Sha1(value.digest[..].try_into().map_err(
+                    |_| ConvertCAError::InvalidReferenceDigestLen(value.digest.len(), "NarSha1"),
                 )?))
             }
-            typ if typ == nar_info::ca::Hash::FlatSha512 as i32 => Self::Flat(NixHash::Sha512(
+            typ if typ == nar_info::ca::Hash::NarSha512 as i32 => Self::Nar(NixHash::Sha512(
                 Box::new(value.digest[..].try_into().map_err(|_| {
-                    ConvertCAError::InvalidReferenceDigestLen(value.digest.len(), "FlatSha512")
+                    ConvertCAError::InvalidReferenceDigestLen(value.digest.len(), "NarSha512")
                 })?),
             )),
             typ if typ == nar_info::ca::Hash::NarMd5 as i32 => {
@@ -285,19 +286,29 @@ impl TryFrom<&nar_info::Ca> for nix_compat::nixhash::CAHash {
                     ConvertCAError::InvalidReferenceDigestLen(value.digest.len(), "NarMd5")
                 })?))
             }
-            typ if typ == nar_info::ca::Hash::NarSha1 as i32 => {
-                Self::Nar(NixHash::Sha1(value.digest[..].try_into().map_err(
-                    |_| ConvertCAError::InvalidReferenceDigestLen(value.digest.len(), "NarSha1"),
+            typ if typ == nar_info::ca::Hash::TextSha256 as i32 => {
+                Self::Text(value.digest[..].try_into().map_err(|_| {
+                    ConvertCAError::InvalidReferenceDigestLen(value.digest.len(), "TextSha256")
+                })?)
+            }
+            typ if typ == nar_info::ca::Hash::FlatSha1 as i32 => {
+                Self::Flat(NixHash::Sha1(value.digest[..].try_into().map_err(
+                    |_| ConvertCAError::InvalidReferenceDigestLen(value.digest.len(), "FlatSha1"),
                 )?))
             }
-            typ if typ == nar_info::ca::Hash::NarSha256 as i32 => {
-                Self::Nar(NixHash::Sha256(value.digest[..].try_into().map_err(
-                    |_| ConvertCAError::InvalidReferenceDigestLen(value.digest.len(), "NarSha256"),
+            typ if typ == nar_info::ca::Hash::FlatMd5 as i32 => {
+                Self::Flat(NixHash::Md5(value.digest[..].try_into().map_err(|_| {
+                    ConvertCAError::InvalidReferenceDigestLen(value.digest.len(), "FlatMd5")
+                })?))
+            }
+            typ if typ == nar_info::ca::Hash::FlatSha256 as i32 => {
+                Self::Flat(NixHash::Sha256(value.digest[..].try_into().map_err(
+                    |_| ConvertCAError::InvalidReferenceDigestLen(value.digest.len(), "FlatSha256"),
                 )?))
             }
-            typ if typ == nar_info::ca::Hash::NarSha512 as i32 => Self::Nar(NixHash::Sha512(
+            typ if typ == nar_info::ca::Hash::FlatSha512 as i32 => Self::Flat(NixHash::Sha512(
                 Box::new(value.digest[..].try_into().map_err(|_| {
-                    ConvertCAError::InvalidReferenceDigestLen(value.digest.len(), "NarSha512")
+                    ConvertCAError::InvalidReferenceDigestLen(value.digest.len(), "FlatSha512")
                 })?),
             )),
             typ => return Err(ConvertCAError::UnknownHashType(typ)),
@@ -349,7 +360,7 @@ impl From<&nix_compat::narinfo::NarInfo<'_>> for NarInfo {
             signatures,
             reference_names: value.references.iter().map(|r| r.to_string()).collect(),
             deriver: value.deriver.as_ref().map(|sp| StorePath {
-                name: sp.name().to_owned(),
+                name: (*sp.name()).to_owned(),
                 digest: Bytes::copy_from_slice(sp.digest()),
             }),
             ca: value.ca.as_ref().map(|ca| ca.into()),
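
A minimal usage sketch for the TryFrom conversion above, assuming only the
proto shapes visible in this hunk (nar_info::Ca carrying an i32 `r#type`
discriminant and a bytes `digest`):

    use nix_compat::nixhash::CAHash;

    // Tag the Ca message as NarSha256 and give it the 32-byte digest
    // that hash type expects.
    let ca = nar_info::Ca {
        r#type: nar_info::ca::Hash::NarSha256 as i32,
        digest: bytes::Bytes::from(vec![0u8; 32]),
    };
    // Digest lengths are checked per hash type; a mismatch surfaces as
    // ConvertCAError::InvalidReferenceDigestLen.
    assert!(matches!(CAHash::try_from(&ca), Ok(CAHash::Nar(_))));
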
diff --git a/tvix/store/src/proto/tests/pathinfo.rs b/tvix/store/src/proto/tests/pathinfo.rs
index 4d0834878d7c..eb47e592be8c 100644
--- a/tvix/store/src/proto/tests/pathinfo.rs
+++ b/tvix/store/src/proto/tests/pathinfo.rs
@@ -3,17 +3,18 @@ use crate::tests::fixtures::*;
 use bytes::Bytes;
 use data_encoding::BASE64;
 use nix_compat::nixbase32;
-use nix_compat::store_path::{self, StorePathRef};
+use nix_compat::store_path::{self, StorePath, StorePathRef};
 use rstest::rstest;
 use tvix_castore::proto as castorepb;
+use tvix_castore::{DirectoryError, ValidateNodeError};
 
 #[rstest]
 #[case::no_node(None, Err(ValidatePathInfoError::NoNodePresent))]
-#[case::no_node_2(Some(castorepb::Node { node: None}), Err(ValidatePathInfoError::NoNodePresent))]
+#[case::no_node_2(Some(castorepb::Node { node: None}), Err(ValidatePathInfoError::InvalidRootNode(DirectoryError::NoNodeSet)))]
 
 fn validate_pathinfo(
     #[case] node: Option<castorepb::Node>,
-    #[case] exp_result: Result<StorePathRef, ValidatePathInfoError>,
+    #[case] exp_result: Result<StorePath<String>, ValidatePathInfoError>,
 ) {
     // construct the PathInfo object
     let p = PathInfo {
@@ -22,9 +23,6 @@ fn validate_pathinfo(
     };
 
     assert_eq!(exp_result, p.validate());
-
-    let err = p.validate().expect_err("validation should fail");
-    assert!(matches!(err, ValidatePathInfoError::NoNodePresent));
 }
 
 #[rstest]
@@ -32,12 +30,12 @@ fn validate_pathinfo(
         name: DUMMY_PATH.into(),
         digest: DUMMY_DIGEST.clone().into(),
         size: 0,
-}, Ok(StorePathRef::from_bytes(DUMMY_PATH.as_bytes()).unwrap()))]
+}, Ok(StorePath::from_bytes(DUMMY_PATH.as_bytes()).unwrap()))]
 #[case::invalid_digest_length(castorepb::DirectoryNode {
         name: DUMMY_PATH.into(),
         digest: Bytes::new(),
         size: 0,
-}, Err(ValidatePathInfoError::InvalidRootNode(castorepb::ValidateNodeError::InvalidDigestLen(0))))]
+}, Err(ValidatePathInfoError::InvalidRootNode(DirectoryError::InvalidNode(DUMMY_PATH.try_into().unwrap(), ValidateNodeError::InvalidDigestLen(0)))))]
 #[case::invalid_node_name_no_storepath(castorepb::DirectoryNode {
         name: "invalid".into(),
         digest: DUMMY_DIGEST.clone().into(),
@@ -48,7 +46,7 @@ fn validate_pathinfo(
 )))]
 fn validate_directory(
     #[case] directory_node: castorepb::DirectoryNode,
-    #[case] exp_result: Result<StorePathRef, ValidatePathInfoError>,
+    #[case] exp_result: Result<StorePath<String>, ValidatePathInfoError>,
 ) {
     // construct the PathInfo object
     let p = PathInfo {
@@ -68,7 +66,7 @@ fn validate_directory(
         size: 0,
         executable: false,
     },
-    Ok(StorePathRef::from_bytes(DUMMY_PATH.as_bytes()).unwrap())
+    Ok(StorePath::from_bytes(DUMMY_PATH.as_bytes()).unwrap())
 )]
 #[case::invalid_digest_len(
     castorepb::FileNode {
@@ -76,7 +74,7 @@ fn validate_directory(
         digest: Bytes::new(),
         ..Default::default()
     },
-    Err(ValidatePathInfoError::InvalidRootNode(castorepb::ValidateNodeError::InvalidDigestLen(0)))
+    Err(ValidatePathInfoError::InvalidRootNode(DirectoryError::InvalidNode(DUMMY_PATH.try_into().unwrap(), ValidateNodeError::InvalidDigestLen(0))))
 )]
 #[case::invalid_node_name(
     castorepb::FileNode {
@@ -91,7 +89,7 @@ fn validate_directory(
 )]
 fn validate_file(
     #[case] file_node: castorepb::FileNode,
-    #[case] exp_result: Result<StorePathRef, ValidatePathInfoError>,
+    #[case] exp_result: Result<StorePath<String>, ValidatePathInfoError>,
 ) {
     // construct the PathInfo object
     let p = PathInfo {
@@ -109,7 +107,7 @@ fn validate_file(
         name: DUMMY_PATH.into(),
         target: "foo".into(),
     },
-    Ok(StorePathRef::from_bytes(DUMMY_PATH.as_bytes()).unwrap())
+    Ok(StorePath::from_bytes(DUMMY_PATH.as_bytes()).unwrap())
 )]
 #[case::invalid_node_name(
     castorepb::SymlinkNode {
@@ -123,7 +121,7 @@ fn validate_file(
 )]
 fn validate_symlink(
     #[case] symlink_node: castorepb::SymlinkNode,
-    #[case] exp_result: Result<StorePathRef, ValidatePathInfoError>,
+    #[case] exp_result: Result<StorePath<String>, ValidatePathInfoError>,
 ) {
     // construct the PathInfo object
     let p = PathInfo {
@@ -228,24 +226,28 @@ fn validate_inconsistent_narinfo_reference_name_digest() {
 /// Create a node with an empty symlink target, and ensure it fails validation.
 #[test]
 fn validate_symlink_empty_target_invalid() {
-    let node = castorepb::node::Node::Symlink(castorepb::SymlinkNode {
-        name: "foo".into(),
-        target: "".into(),
-    });
-
-    node.validate().expect_err("must fail validation");
+    castorepb::Node {
+        node: Some(castorepb::node::Node::Symlink(castorepb::SymlinkNode {
+            name: "foo".into(),
+            target: "".into(),
+        })),
+    }
+    .into_name_and_node()
+    .expect_err("must fail validation");
 }
 
 /// Create a node with a symlink target including null bytes, and ensure it
 /// fails validation.
 #[test]
 fn validate_symlink_target_null_byte_invalid() {
-    let node = castorepb::node::Node::Symlink(castorepb::SymlinkNode {
-        name: "foo".into(),
-        target: "foo\0".into(),
-    });
-
-    node.validate().expect_err("must fail validation");
+    castorepb::Node {
+        node: Some(castorepb::node::Node::Symlink(castorepb::SymlinkNode {
+            name: "foo".into(),
+            target: "foo\0".into(),
+        })),
+    }
+    .into_name_and_node()
+    .expect_err("must fail validation");
 }
 
 /// Create a PathInfo with a correct deriver field and ensure it succeeds.
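
For reference, a sketch of the happy path through the into_name_and_node
API these tests exercise (assuming it returns the name/node pair its name
suggests, with a DirectoryError on failure as seen in the cases above):

    let (name, node) = castorepb::Node {
        node: Some(castorepb::node::Node::Symlink(castorepb::SymlinkNode {
            name: "foo".into(),
            target: "/nix/store/somewhere".into(),
        })),
    }
    .into_name_and_node()
    .expect("valid symlink node must validate");
    // Empty targets and targets containing NUL bytes fail instead, as the
    // two tests above demonstrate.
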
diff --git a/tvix/store/src/tests/nar_renderer.rs b/tvix/store/src/tests/nar_renderer.rs
index 8bfb5a72bb2f..03eaa28aaac8 100644
--- a/tvix/store/src/tests/nar_renderer.rs
+++ b/tvix/store/src/tests/nar_renderer.rs
@@ -10,7 +10,7 @@ use std::sync::Arc;
 use tokio::io::sink;
 use tvix_castore::blobservice::BlobService;
 use tvix_castore::directoryservice::DirectoryService;
-use tvix_castore::proto as castorepb;
+use tvix_castore::Node;
 
 #[rstest]
 #[tokio::test]
@@ -22,10 +22,9 @@ async fn single_symlink(
 
     write_nar(
         &mut buf,
-        &castorepb::node::Node::Symlink(castorepb::SymlinkNode {
-            name: "doesntmatter".into(),
-            target: "/nix/store/somewhereelse".into(),
-        }),
+        &Node::Symlink {
+            target: "/nix/store/somewhereelse".try_into().unwrap(),
+        },
         // don't put anything in the stores, as we don't actually make any requests.
         blob_service,
         directory_service,
@@ -45,12 +44,11 @@ async fn single_file_missing_blob(
 ) {
     let e = write_nar(
         sink(),
-        &castorepb::node::Node::File(castorepb::FileNode {
-            name: "doesntmatter".into(),
-            digest: HELLOWORLD_BLOB_DIGEST.clone().into(),
+        &Node::File {
+            digest: HELLOWORLD_BLOB_DIGEST.clone(),
             size: HELLOWORLD_BLOB_CONTENTS.len() as u64,
             executable: false,
-        }),
+        },
         // the blobservice is empty intentionally, to provoke the error.
         blob_service,
         directory_service,
@@ -90,12 +88,11 @@ async fn single_file_wrong_blob_size(
     // Test with a root FileNode of a too big size
     let e = write_nar(
         sink(),
-        &castorepb::node::Node::File(castorepb::FileNode {
-            name: "doesntmatter".into(),
-            digest: HELLOWORLD_BLOB_DIGEST.clone().into(),
+        &Node::File {
+            digest: HELLOWORLD_BLOB_DIGEST.clone(),
             size: 42, // <- note the wrong size here!
             executable: false,
-        }),
+        },
         blob_service.clone(),
         directory_service.clone(),
     )
@@ -112,12 +109,11 @@ async fn single_file_wrong_blob_size(
     // Test with a root FileNode of a too small size
     let e = write_nar(
         sink(),
-        &castorepb::node::Node::File(castorepb::FileNode {
-            name: "doesntmatter".into(),
-            digest: HELLOWORLD_BLOB_DIGEST.clone().into(),
+        &Node::File {
+            digest: HELLOWORLD_BLOB_DIGEST.clone(),
             size: 2, // <- note the wrong size here!
             executable: false,
-        }),
+        },
         blob_service,
         directory_service,
     )
@@ -153,12 +149,11 @@ async fn single_file(
 
     write_nar(
         &mut buf,
-        &castorepb::node::Node::File(castorepb::FileNode {
-            name: "doesntmatter".into(),
-            digest: HELLOWORLD_BLOB_DIGEST.clone().into(),
+        &Node::File {
+            digest: HELLOWORLD_BLOB_DIGEST.clone(),
             size: HELLOWORLD_BLOB_CONTENTS.len() as u64,
             executable: false,
-        }),
+        },
         blob_service,
         directory_service,
     )
@@ -196,11 +191,10 @@ async fn test_complicated(
 
     write_nar(
         &mut buf,
-        &castorepb::node::Node::Directory(castorepb::DirectoryNode {
-            name: "doesntmatter".into(),
-            digest: DIRECTORY_COMPLICATED.digest().into(),
+        &Node::Directory {
+            digest: DIRECTORY_COMPLICATED.digest(),
             size: DIRECTORY_COMPLICATED.size(),
-        }),
+        },
         blob_service.clone(),
         directory_service.clone(),
     )
@@ -211,11 +205,10 @@ async fn test_complicated(
 
     // ensure calculate_size_and_sha256 returns the correct NAR size and sha256 digest.
     let (nar_size, nar_digest) = calculate_size_and_sha256(
-        &castorepb::node::Node::Directory(castorepb::DirectoryNode {
-            name: "doesntmatter".into(),
-            digest: DIRECTORY_COMPLICATED.digest().into(),
+        &Node::Directory {
+            digest: DIRECTORY_COMPLICATED.digest(),
             size: DIRECTORY_COMPLICATED.size(),
-        }),
+        },
         blob_service,
         directory_service,
     )
diff --git a/tvix/store/src/utils.rs b/tvix/store/src/utils.rs
index e6e42f6ec4cf..1385ece39f8a 100644
--- a/tvix/store/src/utils.rs
+++ b/tvix/store/src/utils.rs
@@ -1,48 +1,214 @@
-use std::sync::Arc;
 use std::{
+    collections::HashMap,
     pin::Pin,
+    sync::Arc,
     task::{self, Poll},
 };
 use tokio::io::{self, AsyncWrite};
 
-use tvix_castore::{
-    blobservice::{self, BlobService},
-    directoryservice::{self, DirectoryService},
-};
+use tvix_castore::{blobservice::BlobService, directoryservice::DirectoryService};
+use url::Url;
 
+use crate::composition::{
+    with_registry, Composition, DeserializeWithRegistry, ServiceBuilder, REG,
+};
 use crate::nar::{NarCalculationService, SimpleRenderer};
-use crate::pathinfoservice::{self, PathInfoService};
+use crate::pathinfoservice::PathInfoService;
+
+#[derive(serde::Deserialize, Default)]
+pub struct CompositionConfigs {
+    pub blobservices:
+        HashMap<String, DeserializeWithRegistry<Box<dyn ServiceBuilder<Output = dyn BlobService>>>>,
+    pub directoryservices: HashMap<
+        String,
+        DeserializeWithRegistry<Box<dyn ServiceBuilder<Output = dyn DirectoryService>>>,
+    >,
+    pub pathinfoservices: HashMap<
+        String,
+        DeserializeWithRegistry<Box<dyn ServiceBuilder<Output = dyn PathInfoService>>>,
+    >,
+}
+
+/// Provides a set of clap arguments to configure tvix-[ca]store services.
+///
+/// This particular variant has defaults tailored for use cases accessing data
+/// locally, like the `tvix-store daemon` command.
+#[derive(clap::Parser, Clone)]
+pub struct ServiceUrls {
+    #[arg(
+        long,
+        env,
+        default_value = "objectstore+file:///var/lib/tvix-store/blobs.object_store"
+    )]
+    blob_service_addr: String,
+
+    #[arg(
+        long,
+        env,
+        default_value = "redb:///var/lib/tvix-store/directories.redb"
+    )]
+    directory_service_addr: String,
+
+    #[arg(long, env, default_value = "redb:///var/lib/tvix-store/pathinfo.redb")]
+    path_info_service_addr: String,
+
+    /// Path to a TOML file describing the way the services should be composed.
+    /// Experimental because the format is not final.
+    /// If specified, the other service addrs are ignored.
+    #[cfg(feature = "xp-store-composition")]
+    #[arg(long, env)]
+    experimental_store_composition: Option<String>,
+}
+
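+// A sketch of intended use (assuming clap's derive API, which this struct
+// already relies on): binaries flatten one of these variants into their own
+// argument struct and hand it to `construct_services` below.
+//
+//     #[derive(clap::Parser)]
+//     struct Cli {
+//         #[clap(flatten)]
+//         service_addrs: ServiceUrls,
+//     }
+//
+//     let (blob, dir, path_info, nar_calc) =
+//         construct_services(Cli::parse().service_addrs).await?;
+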
+/// Provides a set of clap arguments to configure tvix-[ca]store services.
+///
+/// This particular variant has defaults tailored for use cases accessing data
+/// from another running tvix daemon via gRPC.
+#[derive(clap::Parser, Clone)]
+pub struct ServiceUrlsGrpc {
+    #[arg(long, env, default_value = "grpc+http://[::1]:8000")]
+    blob_service_addr: String,
+
+    #[arg(long, env, default_value = "grpc+http://[::1]:8000")]
+    directory_service_addr: String,
+
+    #[arg(long, env, default_value = "grpc+http://[::1]:8000")]
+    path_info_service_addr: String,
+
+    #[cfg(feature = "xp-store-composition")]
+    #[arg(long, env)]
+    experimental_store_composition: Option<String>,
+}
+
+/// Provides a set of clap arguments to configure tvix-[ca]store services.
+///
+/// This particular variant has defaults tailored for use cases keeping all
+/// data in memory.
+/// It's currently used in tvix-cli, as we don't really care about persistence
+/// there yet, and using something else here might make some perf output harder
+/// to interpret.
+#[derive(clap::Parser, Clone)]
+pub struct ServiceUrlsMemory {
+    #[arg(long, env, default_value = "memory://")]
+    blob_service_addr: String,
+
+    #[arg(long, env, default_value = "memory://")]
+    directory_service_addr: String,
+
+    #[arg(long, env, default_value = "memory://")]
+    path_info_service_addr: String,
+
+    #[cfg(feature = "xp-store-composition")]
+    #[arg(long, env)]
+    experimental_store_composition: Option<String>,
+}
+
+impl From<ServiceUrlsGrpc> for ServiceUrls {
+    fn from(urls: ServiceUrlsGrpc) -> ServiceUrls {
+        ServiceUrls {
+            blob_service_addr: urls.blob_service_addr,
+            directory_service_addr: urls.directory_service_addr,
+            path_info_service_addr: urls.path_info_service_addr,
+            #[cfg(feature = "xp-store-composition")]
+            experimental_store_composition: urls.experimental_store_composition,
+        }
+    }
+}
+
+impl From<ServiceUrlsMemory> for ServiceUrls {
+    fn from(urls: ServiceUrlsMemory) -> ServiceUrls {
+        ServiceUrls {
+            blob_service_addr: urls.blob_service_addr,
+            directory_service_addr: urls.directory_service_addr,
+            path_info_service_addr: urls.path_info_service_addr,
+            #[cfg(feature = "xp-store-composition")]
+            experimental_store_composition: urls.experimental_store_composition,
+        }
+    }
+}
+
+pub async fn addrs_to_configs(
+    urls: impl Into<ServiceUrls>,
+) -> Result<CompositionConfigs, Box<dyn std::error::Error + Send + Sync>> {
+    let urls: ServiceUrls = urls.into();
+
+    #[cfg(feature = "xp-store-composition")]
+    if let Some(conf_path) = urls.experimental_store_composition {
+        let conf_text = tokio::fs::read_to_string(conf_path).await?;
+        return Ok(with_registry(&REG, || toml::from_str(&conf_text))?);
+    }
+
+    let mut configs: CompositionConfigs = Default::default();
+
+    let blob_service_url = Url::parse(&urls.blob_service_addr)?;
+    let directory_service_url = Url::parse(&urls.directory_service_addr)?;
+    let path_info_service_url = Url::parse(&urls.path_info_service_addr)?;
+
+    configs.blobservices.insert(
+        "default".into(),
+        with_registry(&REG, || blob_service_url.try_into())?,
+    );
+    configs.directoryservices.insert(
+        "default".into(),
+        with_registry(&REG, || directory_service_url.try_into())?,
+    );
+    configs.pathinfoservices.insert(
+        "default".into(),
+        with_registry(&REG, || path_info_service_url.try_into())?,
+    );
+
+    Ok(configs)
+}
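+
+// The experimental TOML branch above deserializes straight into
+// CompositionConfigs; a hand-rolled equivalent looks like:
+//
+//     let conf_text = tokio::fs::read_to_string("composition.toml").await?;
+//     let configs: CompositionConfigs =
+//         with_registry(&REG, || toml::from_str(&conf_text))?;
+//     let services = construct_services_from_configs(configs).await?;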
 
 /// Construct the store handles from their addrs.
 pub async fn construct_services(
-    blob_service_addr: impl AsRef<str>,
-    directory_service_addr: impl AsRef<str>,
-    path_info_service_addr: impl AsRef<str>,
-) -> std::io::Result<(
-    Arc<dyn BlobService>,
-    Arc<dyn DirectoryService>,
-    Box<dyn PathInfoService>,
-    Box<dyn NarCalculationService>,
-)> {
-    let blob_service: Arc<dyn BlobService> = blobservice::from_addr(blob_service_addr.as_ref())
-        .await?
-        .into();
-    let directory_service: Arc<dyn DirectoryService> =
-        directoryservice::from_addr(directory_service_addr.as_ref())
-            .await?
-            .into();
-    let path_info_service = pathinfoservice::from_addr(
-        path_info_service_addr.as_ref(),
-        blob_service.clone(),
-        directory_service.clone(),
-    )
-    .await?;
-
-    // TODO: grpc client also implements NarCalculationService
-    let nar_calculation_service = Box::new(SimpleRenderer::new(
-        blob_service.clone(),
-        directory_service.clone(),
-    )) as Box<dyn NarCalculationService>;
+    urls: impl Into<ServiceUrls>,
+) -> Result<
+    (
+        Arc<dyn BlobService>,
+        Arc<dyn DirectoryService>,
+        Arc<dyn PathInfoService>,
+        Box<dyn NarCalculationService>,
+    ),
+    Box<dyn std::error::Error + Send + Sync>,
+> {
+    let configs = addrs_to_configs(urls).await?;
+    construct_services_from_configs(configs).await
+}
+
+/// Construct the store handles from a set of composition configs.
+pub async fn construct_services_from_configs(
+    configs: CompositionConfigs,
+) -> Result<
+    (
+        Arc<dyn BlobService>,
+        Arc<dyn DirectoryService>,
+        Arc<dyn PathInfoService>,
+        Box<dyn NarCalculationService>,
+    ),
+    Box<dyn std::error::Error + Send + Sync>,
+> {
+    let mut comp = Composition::default();
+
+    comp.extend(configs.blobservices);
+    comp.extend(configs.directoryservices);
+    comp.extend(configs.pathinfoservices);
+
+    let blob_service: Arc<dyn BlobService> = comp.build("default").await?;
+    let directory_service: Arc<dyn DirectoryService> = comp.build("default").await?;
+    let path_info_service: Arc<dyn PathInfoService> = comp.build("default").await?;
+
+    // HACK: The gRPC client also implements NarCalculationService, and we
+    // really want to use it (otherwise we'd need to fetch everything again for hashing).
+    // Until we revamp store composition and config, detect this special case here.
+    let nar_calculation_service: Box<dyn NarCalculationService> = path_info_service
+        .nar_calculation_service()
+        .unwrap_or_else(|| {
+            Box::new(SimpleRenderer::new(
+                blob_service.clone(),
+                directory_service.clone(),
+            ))
+        });
 
     Ok((
         blob_service,
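
Putting the new API together, a minimal end-to-end sketch (module paths
assumed; error handling elided):

    use clap::Parser;
    use tvix_store::utils::{construct_services, ServiceUrlsMemory};

    // Parse with no extra args, so the memory:// defaults above apply.
    let urls = ServiceUrlsMemory::parse_from(["demo"]);
    let (blob_service, directory_service, path_info_service, nar_calc) =
        construct_services(urls).await?;
    // nar_calc is the service's own calculator when the PathInfoService
    // supplies one (e.g. the gRPC client, per the HACK above), and a
    // SimpleRenderer otherwise.
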
diff --git a/tvix/tools/crunch-v2/Cargo.lock b/tvix/tools/crunch-v2/Cargo.lock
index 3748d7e4e989..71f0516c844d 100644
--- a/tvix/tools/crunch-v2/Cargo.lock
+++ b/tvix/tools/crunch-v2/Cargo.lock
@@ -365,9 +365,9 @@ checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b"
 
 [[package]]
 name = "bytes"
-version = "1.5.0"
+version = "1.6.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a2bd12c1caf447e69cd4528f47f94d203fd2582878ecb9e9465484c4148a8223"
+checksum = "a12916984aab3fa6e39d655a33e09c0071eb36d6ab3aea5c2d78551f1df6d952"
 
 [[package]]
 name = "bzip2"
@@ -627,16 +627,15 @@ dependencies = [
 
 [[package]]
 name = "curve25519-dalek"
-version = "4.1.1"
+version = "4.1.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e89b8c6a2e4b1f45971ad09761aafb85514a84744b67a95e32c3cc1352d1f65c"
+checksum = "97fb8b7c4503de7d6ae7b42ab72a5a59857b4c937ec27a3d4539dba95b5ab2be"
 dependencies = [
  "cfg-if",
  "cpufeatures",
  "curve25519-dalek-derive",
  "digest 0.10.7",
  "fiat-crypto",
- "platforms",
  "rustc_version",
  "subtle",
  "zeroize",
@@ -655,9 +654,9 @@ dependencies = [
 
 [[package]]
 name = "data-encoding"
-version = "2.4.0"
+version = "2.6.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c2e66c9d817f1720209181c316d28635c050fa304f9c79e47a520882661b7308"
+checksum = "e8566979429cf69b49a5c740c60791108e86440e8be149bbea4fe54d2c32d6e2"
 
 [[package]]
 name = "der"
@@ -999,9 +998,9 @@ checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b"
 
 [[package]]
 name = "h2"
-version = "0.3.22"
+version = "0.3.26"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4d6250322ef6e60f93f9a2162799302cd6f68f79f6e5d85c8c16f14d1d958178"
+checksum = "81fe527a889e1532da5c525686d96d4c2e74cdd345badf8dfef9f6b39dd5f5e8"
 dependencies = [
  "bytes",
  "fnv",
@@ -1244,6 +1243,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "4ec2a862134d2a7d32d7983ddcdd1c4923530833c9f2ea1a44fc5fa473989058"
 
 [[package]]
+name = "libmimalloc-sys"
+version = "0.1.39"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "23aa6811d3bd4deb8a84dde645f943476d13b248d818edcf8ce0b2f37f036b44"
+dependencies = [
+ "cc",
+ "libc",
+]
+
+[[package]]
 name = "libredox"
 version = "0.0.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -1343,6 +1352,15 @@ dependencies = [
 ]
 
 [[package]]
+name = "mimalloc"
+version = "0.1.43"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "68914350ae34959d83f732418d51e2427a794055d0b9529f48259ac07af65633"
+dependencies = [
+ "libmimalloc-sys",
+]
+
+[[package]]
 name = "minimal-lexical"
 version = "0.2.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -1359,9 +1377,9 @@ dependencies = [
 
 [[package]]
 name = "mio"
-version = "0.8.9"
+version = "0.8.11"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3dce281c5e46beae905d4de1870d8b1509a9142b62eedf18b443b011ca8343d0"
+checksum = "a4a650543ca06a924e8b371db273b2756685faae30f8487da1b56505a8f78b0c"
 dependencies = [
  "libc",
  "wasi",
@@ -1402,11 +1420,13 @@ version = "0.1.0"
 dependencies = [
  "bitflags 2.4.1",
  "bstr",
+ "bytes",
  "data-encoding",
  "ed25519",
  "ed25519-dalek",
  "enum-primitive-derive",
  "glob",
+ "mimalloc",
  "nom",
  "num-traits",
  "pin-project-lite",
@@ -1415,6 +1435,7 @@ dependencies = [
  "sha2 0.10.8",
  "thiserror",
  "tokio",
+ "tracing",
 ]
 
 [[package]]
@@ -1610,12 +1631,6 @@ dependencies = [
 ]
 
 [[package]]
-name = "platforms"
-version = "3.2.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "14e6ab3f592e6fb464fc9712d8d6e6912de6473954635fd76a589d832cffcbb0"
-
-[[package]]
 name = "polars"
 version = "0.35.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -2441,9 +2456,9 @@ dependencies = [
 
 [[package]]
 name = "shlex"
-version = "1.2.0"
+version = "1.3.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a7cee0529a6d40f580e7a5e6c495c8fbfe21b7b52795ed4bb5e62cdf92bc6380"
+checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64"
 
 [[package]]
 name = "signal-hook-registry"
@@ -2763,10 +2778,22 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef"
 dependencies = [
  "pin-project-lite",
+ "tracing-attributes",
  "tracing-core",
 ]
 
 [[package]]
+name = "tracing-attributes"
+version = "0.1.27"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.39",
+]
+
+[[package]]
 name = "tracing-core"
 version = "0.1.32"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -3176,18 +3203,18 @@ dependencies = [
 
 [[package]]
 name = "zerocopy"
-version = "0.7.26"
+version = "0.7.35"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e97e415490559a91254a2979b4829267a57d2fcd741a98eee8b722fb57289aa0"
+checksum = "1b9b4fd18abc82b8136838da5d50bae7bdea537c574d8dc1a34ed098d6c166f0"
 dependencies = [
  "zerocopy-derive",
 ]
 
 [[package]]
 name = "zerocopy-derive"
-version = "0.7.26"
+version = "0.7.35"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "dd7e48ccf166952882ca8bd778a43502c64f33bf94c12ebe2a7f08e5a0f6689f"
+checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e"
 dependencies = [
  "proc-macro2",
  "quote",
diff --git a/tvix/tools/crunch-v2/Cargo.nix b/tvix/tools/crunch-v2/Cargo.nix
index 35c09ecee711..5af8289a6720 100644
--- a/tvix/tools/crunch-v2/Cargo.nix
+++ b/tvix/tools/crunch-v2/Cargo.nix
@@ -1,4 +1,4 @@
-# This file was @generated by crate2nix 0.13.0 with the command:
+# This file was @generated by crate2nix 0.14.1 with the command:
 #   "generate" "--all-features"
 # See https://github.com/kolloch/crate2nix for more info.
 
@@ -13,6 +13,8 @@
 , rootFeatures ? [ "default" ]
   # If true, throw errors instead of issuing deprecation warnings.
 , strictDeprecation ? false
+  # Elements to add to the `-C target-feature=` argument passed to `rustc`
+  # (separated by `,`, prefixed with `+`).
   # Used for conditional compilation based on CPU feature detection.
 , targetFeatures ? [ ]
   # Whether to perform release builds: longer compile times, faster binaries.
@@ -205,6 +207,7 @@ rec {
         edition = "2015";
         crateBin = [ ];
         sha256 = "1cy6r2sfv5y5cigv86vms7n5nlwhx1rbyxwcraqnmm1rxiib2yyc";
+        libName = "alloc_no_stdlib";
         authors = [
           "Daniel Reiter Horn <danielrh@dropbox.com>"
         ];
@@ -216,6 +219,7 @@ rec {
         edition = "2015";
         crateBin = [ ];
         sha256 = "1kkfbld20ab4165p29v172h8g0wvq8i06z8vnng14whw0isq5ywl";
+        libName = "alloc_stdlib";
         authors = [
           "Daniel Reiter Horn <danielrh@dropbox.com>"
         ];
@@ -232,6 +236,7 @@ rec {
         version = "0.2.16";
         edition = "2018";
         sha256 = "1iayppgq4wqbfbfcqmsbwgamj0s65012sskfvyx07pxavk3gyhh9";
+        libName = "allocator_api2";
         authors = [
           "Zakarum <zaq.dev@icloud.com>"
         ];
@@ -247,6 +252,7 @@ rec {
         version = "0.1.1";
         edition = "2018";
         sha256 = "1w7ynjxrfs97xg3qlcdns4kgfpwcdv824g611fq32cag4cdr96g9";
+        libName = "android_tzdata";
         authors = [
           "RumovZ"
         ];
@@ -324,6 +330,7 @@ rec {
         version = "0.2.3";
         edition = "2021";
         sha256 = "134jhzrz89labrdwxxnjxqjdg06qvaflj1wkfnmyapwyldfwcnn7";
+        libName = "anstyle_parse";
         dependencies = [
           {
             name = "utf8parse";
@@ -343,6 +350,7 @@ rec {
         version = "1.0.2";
         edition = "2021";
         sha256 = "0j3na4b1nma39g4x7cwvj009awxckjf3z2vkwhldgka44hqj72g2";
+        libName = "anstyle_query";
         dependencies = [
           {
             name = "windows-sys";
@@ -358,6 +366,7 @@ rec {
         version = "3.0.2";
         edition = "2021";
         sha256 = "19v0fv400bmp4niqpzxnhg83vz12mmqv7l2l8vi80qcdxj0lpm8w";
+        libName = "anstyle_wincon";
         dependencies = [
           {
             name = "anstyle";
@@ -422,6 +431,7 @@ rec {
         version = "0.2.0";
         edition = "2021";
         sha256 = "0xpbqf7qkvzplpjd7f0wbcf2n1v9vygdccwxkd1amxp4il0hlzdz";
+        libName = "array_init_cursor";
 
       };
       "arrayref" = rec {
@@ -453,6 +463,7 @@ rec {
         version = "0.8.1";
         edition = "2018";
         sha256 = "1irj67p6c224dzw86jr7j3z9r5zfid52gy6ml8rdqk4r2si4x207";
+        libName = "arrow_format";
         authors = [
           "Jorge C. Leitao <jorgecarleitao@gmail.com>"
         ];
@@ -488,6 +499,7 @@ rec {
         version = "0.3.5";
         edition = "2018";
         sha256 = "0l8sjq1rylkb1ak0pdyjn83b3k6x36j22myngl4sqqgg7whdsmnd";
+        libName = "async_stream";
         authors = [
           "Carl Lerche <me@carllerche.com>"
         ];
@@ -513,6 +525,7 @@ rec {
         edition = "2018";
         sha256 = "14q179j4y8p2z1d0ic6aqgy9fhwz8p9cai1ia8kpw4bw7q12mrhn";
         procMacro = true;
+        libName = "async_stream_impl";
         authors = [
           "Carl Lerche <me@carllerche.com>"
         ];
@@ -539,6 +552,7 @@ rec {
         edition = "2021";
         sha256 = "1ydhbsqjqqa6bxbv0kgys2wq2vi3jpwjy57dk162ajwppgqkfrd6";
         procMacro = true;
+        libName = "async_trait";
         authors = [
           "David Tolnay <dtolnay@gmail.com>"
         ];
@@ -790,6 +804,7 @@ rec {
         version = "0.10.4";
         edition = "2018";
         sha256 = "0w9sa2ypmrsqqvc20nhwr75wbb5cjr4kkyhpjm1z1lv2kdicfy1h";
+        libName = "block_buffer";
         authors = [
           "RustCrypto Developers"
         ];
@@ -806,6 +821,7 @@ rec {
         version = "0.9.0";
         edition = "2018";
         sha256 = "1r4pf90s7d7lj1wdjhlnqa26vvbm6pnc33z138lxpnp9srpi2lj1";
+        libName = "block_buffer";
         authors = [
           "RustCrypto Developers"
         ];
@@ -863,6 +879,7 @@ rec {
         edition = "2015";
         crateBin = [ ];
         sha256 = "0kyyh9701dwqzwvn2frff4ww0zibikqd1s1xvl7n1pfpc3z4lbjf";
+        libName = "brotli_decompressor";
         authors = [
           "Daniel Reiter Horn <danielrh@dropbox.com>"
           "The Brotli Authors"
@@ -998,9 +1015,9 @@ rec {
       };
       "bytes" = rec {
         crateName = "bytes";
-        version = "1.5.0";
+        version = "1.6.1";
         edition = "2018";
-        sha256 = "08w2i8ac912l8vlvkv3q51cd4gr09pwlg3sjsjffcizlrb0i5gd2";
+        sha256 = "0lnryqfiymbq5mfflfmbsqvfnw80kkh36nk5kpiscgxb9ac1cad1";
         authors = [
           "Carl Lerche <me@carllerche.com>"
           "Sean McArthur <sean@seanmonstar.com>"
@@ -1098,6 +1115,7 @@ rec {
         version = "1.0.0";
         edition = "2018";
         sha256 = "1za0vb97n4brpzpv8lsbnzmq5r8f2b0cpqqr0sy8h5bn751xxwds";
+        libName = "cfg_if";
         authors = [
           "Alex Crichton <alex@alexcrichton.com>"
         ];
@@ -1333,6 +1351,7 @@ rec {
         version = "0.9.5";
         edition = "2021";
         sha256 = "0vxb4d25mgk8y0phay7j078limx2553716ixsr1x5605k31j5h98";
+        libName = "const_oid";
         authors = [
           "RustCrypto Developers"
         ];
@@ -1355,6 +1374,7 @@ rec {
         version = "0.9.3";
         edition = "2015";
         sha256 = "0ii1ihpjb30fk38gdikm5wqlkmyr8k46fh4k2r8sagz5dng7ljhr";
+        libName = "core_foundation";
         authors = [
           "The Servo Project Developers"
         ];
@@ -1382,6 +1402,7 @@ rec {
         version = "0.8.4";
         edition = "2015";
         sha256 = "1yhf471qj6snnm2mcswai47vsbc9w30y4abmdp4crb4av87sb5p4";
+        libName = "core_foundation_sys";
         authors = [
           "The Servo Project Developers"
         ];
@@ -1399,7 +1420,7 @@ rec {
           {
             name = "libc";
             packageId = "libc";
-            target = { target, features }: (pkgs.rust.lib.toRustTarget stdenv.hostPlatform == "aarch64-linux-android");
+            target = { target, features }: (stdenv.hostPlatform.rust.rustcTarget == "aarch64-linux-android");
           }
           {
             name = "libc";
@@ -1444,6 +1465,7 @@ rec {
         version = "0.5.8";
         edition = "2018";
         sha256 = "004jz4wxp9k26z657i7rsh9s7586dklx2c5aqf1n3w1dgzvjng53";
+        libName = "crossbeam_channel";
         dependencies = [
           {
             name = "cfg-if";
@@ -1468,6 +1490,7 @@ rec {
         version = "0.8.3";
         edition = "2018";
         sha256 = "1vqczbcild7nczh5z116w8w46z991kpjyw7qxkf24c14apwdcvyf";
+        libName = "crossbeam_deque";
         dependencies = [
           {
             name = "cfg-if";
@@ -1499,6 +1522,7 @@ rec {
         version = "0.9.15";
         edition = "2018";
         sha256 = "1ixwc3cq816wb8rlh3ix4jnybqbyyq4l61nwlx0mfm3ck0s148df";
+        libName = "crossbeam_epoch";
         dependencies = [
           {
             name = "cfg-if";
@@ -1539,6 +1563,7 @@ rec {
         version = "0.3.8";
         edition = "2018";
         sha256 = "1p9s6n4ckwdgxkb7a8ay9zjzmgc8ppfbxix2vr07rwskibmb7kyi";
+        libName = "crossbeam_queue";
         dependencies = [
           {
             name = "cfg-if";
@@ -1562,6 +1587,7 @@ rec {
         version = "0.8.16";
         edition = "2018";
         sha256 = "153j0gikblz7n7qdvdi8pslhi008s1yp9cmny6vw07ad7pbb48js";
+        libName = "crossbeam_utils";
         dependencies = [
           {
             name = "cfg-if";
@@ -1590,12 +1616,8 @@ rec {
             requiredFeatures = [ ];
           }
         ];
-        # We can't filter paths with references in Nix 2.4
-        # See https://github.com/NixOS/nix/issues/5410
-        src =
-          if ((lib.versionOlder builtins.nixVersion "2.4pre20211007") || (lib.versionOlder "2.5" builtins.nixVersion))
-          then lib.cleanSourceWith { filter = sourceFilter; src = ./.; }
-          else ./.;
+        src = lib.cleanSourceWith { filter = sourceFilter; src = ./.; };
+        libName = "crunch_v2";
         dependencies = [
           {
             name = "anyhow";
@@ -1705,6 +1727,7 @@ rec {
         version = "0.1.6";
         edition = "2018";
         sha256 = "1cvby95a6xg7kxdz5ln3rl9xh66nz66w46mm3g56ri1z5x815yqv";
+        libName = "crypto_common";
         authors = [
           "RustCrypto Developers"
         ];
@@ -1730,6 +1753,7 @@ rec {
         version = "0.11.1";
         edition = "2018";
         sha256 = "05672ncc54h66vph42s0a42ljl69bwnqjh0x4xgj2v1395psildi";
+        libName = "crypto_mac";
         authors = [
           "RustCrypto Developers"
         ];
@@ -1752,9 +1776,10 @@ rec {
       };
       "curve25519-dalek" = rec {
         crateName = "curve25519-dalek";
-        version = "4.1.1";
+        version = "4.1.3";
         edition = "2021";
-        sha256 = "0p7ns5917k6369gajrsbfj24llc5zfm635yh3abla7sb5rm8r6z8";
+        sha256 = "1gmjb9dsknrr8lypmhkyjd67p1arb8mbfamlwxm7vph38my8pywp";
+        libName = "curve25519_dalek";
         authors = [
           "Isis Lovecruft <isis@patternsinthevoid.net>"
           "Henry de Valence <hdevalence@hdevalence.ca>"
@@ -1800,10 +1825,6 @@ rec {
         ];
         buildDependencies = [
           {
-            name = "platforms";
-            packageId = "platforms";
-          }
-          {
             name = "rustc_version";
             packageId = "rustc_version";
           }
@@ -1827,6 +1848,7 @@ rec {
         edition = "2021";
         sha256 = "1cry71xxrr0mcy5my3fb502cwfxy6822k4pm19cwrilrg7hq4s7l";
         procMacro = true;
+        libName = "curve25519_dalek_derive";
         dependencies = [
           {
             name = "proc-macro2";
@@ -1846,9 +1868,10 @@ rec {
       };
       "data-encoding" = rec {
         crateName = "data-encoding";
-        version = "2.4.0";
+        version = "2.6.0";
         edition = "2018";
-        sha256 = "023k3dk8422jgbj7k72g63x51h1mhv91dhw1j4h205vzh6fnrrn2";
+        sha256 = "1qnn68n4vragxaxlkqcb1r28d3hhj43wch67lm4rpxlw89wnjmp8";
+        libName = "data_encoding";
         authors = [
           "Julien Cretin <git@ia0.eu>"
         ];
@@ -1953,6 +1976,7 @@ rec {
         version = "2.0.0";
         edition = "2018";
         sha256 = "1q9kr151h9681wwp6is18750ssghz6j9j7qm7qi1ngcwy7mzi35r";
+        libName = "dirs_next";
         authors = [
           "The @xdg-rs members"
         ];
@@ -1973,6 +1997,7 @@ rec {
         version = "0.1.2";
         edition = "2018";
         sha256 = "0kavhavdxv4phzj4l0psvh55hszwnr0rcz8sxbvx20pyqi2a3gaf";
+        libName = "dirs_sys_next";
         authors = [
           "The @xdg-rs members"
         ];
@@ -2002,6 +2027,7 @@ rec {
         version = "1.0.16";
         edition = "2018";
         sha256 = "0pa9kas6a241pbx0q82ipwi4f7m7wwyzkkc725caky24gl4j4nsl";
+        libName = "dyn_clone";
         authors = [
           "David Tolnay <dtolnay@gmail.com>"
         ];
@@ -2044,6 +2070,7 @@ rec {
         version = "2.1.0";
         edition = "2021";
         sha256 = "1h13qm789m9gdjl6jazss80hqi8ll37m0afwcnw23zcbqjp8wqhz";
+        libName = "ed25519_dalek";
         authors = [
           "isis lovecruft <isis@patternsinthevoid.net>"
           "Tony Arcieri <bascule@gmail.com>"
@@ -2145,6 +2172,33 @@ rec {
         };
         resolvedDefaultFeatures = [ "default" "std" ];
       };
+      "enum-primitive-derive" = rec {
+        crateName = "enum-primitive-derive";
+        version = "0.3.0";
+        edition = "2018";
+        sha256 = "0k6wcf58h5kh64yq5nfq71va53kaya0kzxwsjwbgwm2n2zd9axxs";
+        procMacro = true;
+        libName = "enum_primitive_derive";
+        authors = [
+          "Doug Goldstein <cardoe@cardoe.com>"
+        ];
+        dependencies = [
+          {
+            name = "num-traits";
+            packageId = "num-traits";
+            usesDefaultFeatures = false;
+          }
+          {
+            name = "quote";
+            packageId = "quote";
+          }
+          {
+            name = "syn";
+            packageId = "syn 2.0.39";
+          }
+        ];
+
+      };
       "enum_dispatch" = rec {
         crateName = "enum_dispatch";
         version = "0.3.12";
@@ -2241,6 +2295,7 @@ rec {
         version = "0.1.9";
         edition = "2015";
         sha256 = "0nj6j26p71bjy8h42x6jahx1hn0ng6mc2miwpgwnp8vnwqf4jq3k";
+        libName = "fallible_streaming_iterator";
         authors = [
           "Steven Fackler <sfackler@gmail.com>"
         ];
@@ -2251,6 +2306,7 @@ rec {
         version = "0.2.0";
         edition = "2018";
         sha256 = "0g7kfll3xyh99kc7r352lhljnwvgayxxa6saifb6725inikmyxlm";
+        libName = "fast_float";
         authors = [
           "Ivan Smirnov <i.s.smirnov@gmail.com>"
         ];
@@ -2296,6 +2352,7 @@ rec {
         version = "0.2.5";
         edition = "2018";
         sha256 = "1dxn0g50pv0ppal779vi7k40fr55pbhkyv4in7i13pgl4sn3wmr7";
+        libName = "fiat_crypto";
         authors = [
           "Fiat Crypto library authors <jgross@mit.edu>"
         ];
@@ -2475,6 +2532,7 @@ rec {
         version = "0.3.29";
         edition = "2018";
         sha256 = "1jxsifvrbqzdadk0svbax71cba5d3qg3wgjq8i160mxmd1kdckgz";
+        libName = "futures_channel";
         dependencies = [
           {
             name = "futures-core";
@@ -2502,6 +2560,7 @@ rec {
         version = "0.3.29";
         edition = "2018";
         sha256 = "1308bpj0g36nhx2y6bl4mm6f1gnh9xyvvw2q2wpdgnb6dv3247gb";
+        libName = "futures_core";
         features = {
           "default" = [ "std" ];
           "portable-atomic" = [ "dep:portable-atomic" ];
@@ -2514,6 +2573,7 @@ rec {
         version = "0.3.29";
         edition = "2018";
         sha256 = "1g4pjni0sw28djx6mlcfz584abm2lpifz86cmng0kkxh7mlvhkqg";
+        libName = "futures_executor";
         dependencies = [
           {
             name = "futures-core";
@@ -2544,6 +2604,7 @@ rec {
         version = "0.3.29";
         edition = "2018";
         sha256 = "1ajsljgny3zfxwahba9byjzclrgvm1ypakca8z854k2w7cb4mwwb";
+        libName = "futures_io";
         features = {
           "default" = [ "std" ];
         };
@@ -2555,6 +2616,7 @@ rec {
         edition = "2018";
         sha256 = "1nwd18i8kvpkdfwm045hddjli0n96zi7pn6f99zi9c74j7ym7cak";
         procMacro = true;
+        libName = "futures_macro";
         dependencies = [
           {
             name = "proc-macro2";
@@ -2577,6 +2639,7 @@ rec {
         version = "0.3.29";
         edition = "2018";
         sha256 = "05q8jykqddxzp8nwf00wjk5m5mqi546d7i8hsxma7hiqxrw36vg3";
+        libName = "futures_sink";
         features = {
           "default" = [ "std" ];
           "std" = [ "alloc" ];
@@ -2588,6 +2651,7 @@ rec {
         version = "0.3.29";
         edition = "2018";
         sha256 = "1qmsss8rb5ppql4qvd4r70h9gpfcpd0bg2b3qilxrnhdkc397lgg";
+        libName = "futures_task";
         features = {
           "default" = [ "std" ];
           "std" = [ "alloc" ];
@@ -2599,6 +2663,7 @@ rec {
         version = "0.3.29";
         edition = "2018";
         sha256 = "0141rkqh0psj4h8x8lgsl1p29dhqr7z2wcixkcbs60z74kb2d5d1";
+        libName = "futures_util";
         dependencies = [
           {
             name = "futures-channel";
@@ -2802,9 +2867,9 @@ rec {
       };
       "h2" = rec {
         crateName = "h2";
-        version = "0.3.22";
+        version = "0.3.26";
         edition = "2018";
-        sha256 = "0y41jlflvw8niifdirgng67zdmic62cjf5m2z69hzrpn5qr50qjd";
+        sha256 = "1s7msnfv7xprzs6xzfj5sg6p8bjcdpcqcmjjbkd345cyi1x55zl1";
         authors = [
           "Carl Lerche <me@carllerche.com>"
           "Sean McArthur <sean@seanmonstar.com>"
@@ -2941,6 +3006,7 @@ rec {
         version = "0.3.3";
         edition = "2021";
         sha256 = "1dyc8qsjh876n74a3rcz8h43s27nj1sypdhsn2ms61bd3b47wzyp";
+        libName = "hermit_abi";
         authors = [
           "Stefan Lankes"
         ];
@@ -3045,6 +3111,7 @@ rec {
         version = "0.4.5";
         edition = "2018";
         sha256 = "1l967qwwlvhp198xdrnc0p5d7jwfcp6q2lm510j6zqw4s4b8zwym";
+        libName = "http_body";
         authors = [
           "Carl Lerche <me@carllerche.com>"
           "Lucio Franco <luciofranco14@gmail.com>"
@@ -3201,6 +3268,7 @@ rec {
         version = "0.23.2";
         edition = "2018";
         sha256 = "0736s6a32dqr107f943xaz1n05flbinq6l19lq1wsrxkc5g9d20p";
+        libName = "hyper_rustls";
         dependencies = [
           {
             name = "http";
@@ -3275,6 +3343,7 @@ rec {
         version = "0.1.58";
         edition = "2018";
         sha256 = "081vcr8z8ddhl5r1ywif6grnswk01b2ac4nks2bhn8zzdimvh9l3";
+        libName = "iana_time_zone";
         authors = [
           "Andrew Straw <strawman@astraw.com>"
           "Renรฉ Kijewski <rene.kijewski@fu-berlin.de>"
@@ -3320,6 +3389,7 @@ rec {
         version = "0.1.2";
         edition = "2018";
         sha256 = "17r6jmj31chn7xs9698r122mapq85mfnv98bb4pg6spm0si2f67k";
+        libName = "iana_time_zone_haiku";
         authors = [
           "Renรฉ Kijewski <crates.io@k6i.de>"
         ];
@@ -3480,6 +3550,7 @@ rec {
         version = "0.3.65";
         edition = "2018";
         sha256 = "1s1gaxgzpqfyygc7f2pwp9y128rh5f8zvsc4nm5yazgna9cw7h2l";
+        libName = "js_sys";
         authors = [
           "The wasm-bindgen Developers"
         ];
@@ -3534,6 +3605,33 @@ rec {
         };
         resolvedDefaultFeatures = [ "default" ];
       };
+      "libmimalloc-sys" = rec {
+        crateName = "libmimalloc-sys";
+        version = "0.1.39";
+        edition = "2018";
+        links = "mimalloc";
+        sha256 = "0i3b0dzz7cp0ik7ys66q92r16va78gwlbrnxhj5fnkdxsc8niai3";
+        libName = "libmimalloc_sys";
+        authors = [
+          "Octavian Oncescu <octavonce@gmail.com>"
+        ];
+        dependencies = [
+          {
+            name = "libc";
+            packageId = "libc";
+          }
+        ];
+        buildDependencies = [
+          {
+            name = "cc";
+            packageId = "cc";
+          }
+        ];
+        features = {
+          "cty" = [ "dep:cty" ];
+          "extended" = [ "cty" ];
+        };
+      };
       "libredox" = rec {
         crateName = "libredox";
         version = "0.0.1";
@@ -3567,6 +3665,7 @@ rec {
         version = "0.4.11";
         edition = "2021";
         sha256 = "0adqqaya81s7k5r323g65pw6q85pxd1x4prz9whh5i4abysqi54n";
+        libName = "linux_raw_sys";
         authors = [
           "Dan Gohman <dev@sunfishcode.online>"
         ];
@@ -3654,6 +3753,7 @@ rec {
         edition = "2015";
         links = "lz4";
         sha256 = "0059ik4xlvnss5qfh6l691psk4g3350ljxaykzv10yr0gqqppljp";
+        libName = "lz4_sys";
         authors = [
           "Jens Heyens <jens.heyens@ewetel.net>"
           "Artem V. Navrotskiy <bozaro@buzzsoft.ru>"
@@ -3679,6 +3779,7 @@ rec {
         edition = "2018";
         links = "lzma";
         sha256 = "09sxp20waxyglgn3cjz8qjkspb3ryz2fwx4rigkwvrk46ymh9njz";
+        libName = "lzma_sys";
         authors = [
           "Alex Crichton <alex@alexcrichton.com>"
         ];
@@ -3795,11 +3896,40 @@ rec {
         features = { };
         resolvedDefaultFeatures = [ "default" ];
       };
+      "mimalloc" = rec {
+        crateName = "mimalloc";
+        version = "0.1.43";
+        edition = "2018";
+        sha256 = "0csnyrxc16i592gm5ffham07jyj2w98qsh9jyy1rv59lmr8474b8";
+        authors = [
+          "Octavian Oncescu <octavonce@gmail.com>"
+          "Vincent Rouillรฉ <vincent@speedy37.fr>"
+          "Thom Chiovoloni <chiovolonit@gmail.com>"
+        ];
+        dependencies = [
+          {
+            name = "libmimalloc-sys";
+            packageId = "libmimalloc-sys";
+            usesDefaultFeatures = false;
+          }
+        ];
+        features = {
+          "debug" = [ "libmimalloc-sys/debug" ];
+          "debug_in_debug" = [ "libmimalloc-sys/debug_in_debug" ];
+          "extended" = [ "libmimalloc-sys/extended" ];
+          "local_dynamic_tls" = [ "libmimalloc-sys/local_dynamic_tls" ];
+          "no_thp" = [ "libmimalloc-sys/no_thp" ];
+          "override" = [ "libmimalloc-sys/override" ];
+          "secure" = [ "libmimalloc-sys/secure" ];
+        };
+        resolvedDefaultFeatures = [ "default" ];
+      };
       "minimal-lexical" = rec {
         crateName = "minimal-lexical";
         version = "0.2.1";
         edition = "2018";
         sha256 = "16ppc5g84aijpri4jzv14rvcnslvlpphbszc7zzp6vfkddf4qdb8";
+        libName = "minimal_lexical";
         authors = [
           "Alex Huszagh <ahuszagh@gmail.com>"
         ];
@@ -3837,9 +3967,9 @@ rec {
       };
       "mio" = rec {
         crateName = "mio";
-        version = "0.8.9";
+        version = "0.8.11";
         edition = "2018";
-        sha256 = "1l23hg513c23nhcdzvk25caaj28mic6qgqadbn8axgj6bqf2ikix";
+        sha256 = "034byyl0ardml5yliy1hmvx8arkmn9rv479pid794sm07ia519m4";
         authors = [
           "Carl Lerche <me@carllerche.com>"
           "Thomas de Zeeuw <thomasdezeeuw@gmail.com>"
@@ -3920,6 +4050,7 @@ rec {
         edition = "2021";
         sha256 = "1j1avbxw7jscyi7dmnywhlwbiny1fvg1vpp9fy4dc1pd022kva16";
         procMacro = true;
+        libName = "multiversion_macros";
         authors = [
           "Caleb Zulawski <caleb.zulawski@gmail.com>"
         ];
@@ -3952,12 +4083,8 @@ rec {
         version = "0.1.0";
         edition = "2021";
         crateBin = [ ];
-        # We can't filter paths with references in Nix 2.4
-        # See https://github.com/NixOS/nix/issues/5410
-        src =
-          if ((lib.versionOlder builtins.nixVersion "2.4pre20211007") || (lib.versionOlder "2.5" builtins.nixVersion))
-          then lib.cleanSourceWith { filter = sourceFilter; src = ../../nix-compat; }
-          else ../../nix-compat;
+        src = lib.cleanSourceWith { filter = sourceFilter; src = ../../nix-compat; };
+        libName = "nix_compat";
         dependencies = [
           {
             name = "bitflags";
@@ -3969,6 +4096,11 @@ rec {
             features = [ "alloc" "unicode" "serde" ];
           }
           {
+            name = "bytes";
+            packageId = "bytes";
+            optional = true;
+          }
+          {
             name = "data-encoding";
             packageId = "data-encoding";
           }
@@ -3981,14 +4113,31 @@ rec {
             packageId = "ed25519-dalek";
           }
           {
+            name = "enum-primitive-derive";
+            packageId = "enum-primitive-derive";
+          }
+          {
             name = "glob";
             packageId = "glob";
           }
           {
+            name = "mimalloc";
+            packageId = "mimalloc";
+          }
+          {
             name = "nom";
             packageId = "nom";
           }
           {
+            name = "num-traits";
+            packageId = "num-traits";
+          }
+          {
+            name = "pin-project-lite";
+            packageId = "pin-project-lite";
+            optional = true;
+          }
+          {
             name = "serde";
             packageId = "serde";
             features = [ "derive" ];
@@ -4005,17 +4154,36 @@ rec {
             name = "thiserror";
             packageId = "thiserror";
           }
+          {
+            name = "tokio";
+            packageId = "tokio";
+            optional = true;
+            features = [ "io-util" "macros" ];
+          }
+          {
+            name = "tracing";
+            packageId = "tracing";
+          }
         ];
         devDependencies = [
           {
+            name = "mimalloc";
+            packageId = "mimalloc";
+          }
+          {
             name = "serde_json";
             packageId = "serde_json";
           }
         ];
         features = {
-          "async" = [ "futures-util" ];
-          "futures-util" = [ "dep:futures-util" ];
+          "async" = [ "tokio" ];
+          "bytes" = [ "dep:bytes" ];
+          "default" = [ "async" "wire" ];
+          "pin-project-lite" = [ "dep:pin-project-lite" ];
+          "tokio" = [ "dep:tokio" ];
+          "wire" = [ "tokio" "pin-project-lite" "bytes" ];
         };
+        resolvedDefaultFeatures = [ "async" "bytes" "default" "pin-project-lite" "tokio" "wire" ];
       };
       "nom" = rec {
         crateName = "nom";
@@ -4084,9 +4252,10 @@ rec {
       };
       "num-traits" = rec {
         crateName = "num-traits";
-        version = "0.2.17";
-        edition = "2018";
-        sha256 = "0z16bi5zwgfysz6765v3rd6whfbjpihx3mhsn4dg8dzj2c221qrr";
+        version = "0.2.19";
+        edition = "2021";
+        sha256 = "0h984rhdkkqd4ny9cif7y2azl3xdfb7768hb9irhpsch4q3gq787";
+        libName = "num_traits";
         authors = [
           "The Rust Project Developers"
         ];
@@ -4199,6 +4368,7 @@ rec {
         version = "0.3.0";
         edition = "2018";
         sha256 = "1m8kzi4nd6shdqimn0mgb24f0hxslhnqd1whakyq06wcqd086jk2";
+        libName = "opaque_debug";
         authors = [
           "RustCrypto Developers"
         ];
@@ -4209,6 +4379,7 @@ rec {
         version = "0.1.5";
         edition = "2015";
         sha256 = "1kq18qm48rvkwgcggfkqq6pm948190czqc94d6bm2sir5hq1l0gz";
+        libName = "openssl_probe";
         authors = [
           "Alex Crichton <alex@alexcrichton.com>"
         ];
@@ -4364,6 +4535,7 @@ rec {
         version = "0.2.4";
         edition = "2021";
         sha256 = "07wf6wf4jrxlq5p3xldxsnabp7jl06my2qp7kiwy9m3x2r5wac8i";
+        libName = "parquet_format_safe";
         authors = [
           "Apache Thrift contributors <dev@thrift.apache.org>"
           "Jorge Leitao <jorgecarleitao@gmail.com>"
@@ -4393,6 +4565,7 @@ rec {
         version = "2.3.0";
         edition = "2018";
         sha256 = "152slflmparkh27hprw62sph8rv77wckzhwl2dhqk6bf563lfalv";
+        libName = "percent_encoding";
         authors = [
           "The rust-url developers"
         ];
@@ -4437,6 +4610,7 @@ rec {
         version = "0.2.13";
         edition = "2018";
         sha256 = "0n0bwr5qxlf0mhn2xkl36sy55118s9qmvx2yl5f3ixkb007lbywa";
+        libName = "pin_project_lite";
 
       };
       "pin-utils" = rec {
@@ -4444,6 +4618,7 @@ rec {
         version = "0.1.0";
         edition = "2018";
         sha256 = "117ir7vslsl2z1a7qzhws4pd01cg2d3338c47swjyvqv2n60v1wb";
+        libName = "pin_utils";
         authors = [
           "Josef Brandl <mail@josefbrandl.de>"
         ];
@@ -4488,6 +4663,7 @@ rec {
         version = "0.3.27";
         edition = "2015";
         sha256 = "0r39ryh1magcq4cz5g9x88jllsnxnhcqr753islvyk4jp9h2h1r6";
+        libName = "pkg_config";
         authors = [
           "Alex Crichton <alex@alexcrichton.com>"
         ];
@@ -4509,21 +4685,6 @@ rec {
         };
         resolvedDefaultFeatures = [ "default" "std" ];
       };
-      "platforms" = rec {
-        crateName = "platforms";
-        version = "3.2.0";
-        edition = "2018";
-        sha256 = "1c6bzwn877aqdbbmyqsl753ycbciwvbdh4lpzijb8vrfb4zsprhl";
-        authors = [
-          "Tony Arcieri <bascule@gmail.com>"
-          "Sergey \"Shnatsel\" Davidoff <shnatsel@gmail.com>"
-        ];
-        features = {
-          "default" = [ "std" ];
-          "serde" = [ "dep:serde" ];
-        };
-        resolvedDefaultFeatures = [ "default" "std" ];
-      };
       "polars" = rec {
         crateName = "polars";
         version = "0.35.4";
@@ -4718,6 +4879,7 @@ rec {
         version = "0.35.4";
         edition = "2021";
         sha256 = "1rxa9dfsqy7mh4w9gy7y7kpig0wrzrjqi1axj43rnxyrlqq38l6x";
+        libName = "polars_arrow";
         authors = [
           "Jorge C. Leitao <jorgecarleitao@gmail.com>"
           "Apache Arrow <dev@arrow.apache.org>"
@@ -4779,7 +4941,7 @@ rec {
           {
             name = "getrandom";
             packageId = "getrandom";
-            target = { target, features }: (pkgs.rust.lib.toRustTarget stdenv.hostPlatform == "wasm32-unknown-unknown");
+            target = { target, features }: (stdenv.hostPlatform.rust.rustcTarget == "wasm32-unknown-unknown");
             features = [ "js" ];
           }
           {
@@ -4896,6 +5058,7 @@ rec {
         version = "0.35.4";
         edition = "2021";
         sha256 = "1mnginlmgmlp167ij0r5lywvy50zns1cr8db1ikxxv2xwnwdawxf";
+        libName = "polars_core";
         authors = [
           "Ritchie Vink <ritchie46@gmail.com>"
         ];
@@ -5045,6 +5208,7 @@ rec {
         version = "0.35.4";
         edition = "2021";
         sha256 = "127l5hazh5hn73j767cfyjwgbvvbab8hj53l1jp976daivb201gb";
+        libName = "polars_error";
         authors = [
           "Ritchie Vink <ritchie46@gmail.com>"
         ];
@@ -5081,6 +5245,7 @@ rec {
         version = "0.35.4";
         edition = "2021";
         sha256 = "01mwcdikw7y92xjhlsmj4wf0p9r3kvmhrvsbnsfh1mmc8l3hmqcn";
+        libName = "polars_io";
         authors = [
           "Ritchie Vink <ritchie46@gmail.com>"
         ];
@@ -5239,6 +5404,7 @@ rec {
         version = "0.35.4";
         edition = "2021";
         sha256 = "084gp9qa1w9b7dglcmrvlx6xrcl7hw5nmlb26w6xvrjvf1czfm9m";
+        libName = "polars_lazy";
         authors = [
           "Ritchie Vink <ritchie46@gmail.com>"
         ];
@@ -5428,6 +5594,7 @@ rec {
         version = "0.35.4";
         edition = "2021";
         sha256 = "0dpnxcgc57k8xvp4jmjhz8jwz8rclf62h22zjiwpzaka54cb4zhs";
+        libName = "polars_ops";
         authors = [
           "Ritchie Vink <ritchie46@gmail.com>"
         ];
@@ -5568,6 +5735,7 @@ rec {
         version = "0.35.4";
         edition = "2021";
         sha256 = "1pb79wb1f31pk48lfm2w72hdfxqz6vv65iyxb072skfxnzj10q0l";
+        libName = "polars_parquet";
         authors = [
           "Jorge C. Leitao <jorgecarleitao@gmail.com>"
           "Apache Arrow <dev@arrow.apache.org>"
@@ -5688,6 +5856,7 @@ rec {
         version = "0.35.4";
         edition = "2021";
         sha256 = "1v5xniw8yxx9cnksc4bf169b3p7gcl6dzryzgfd2m4scyrylw2b6";
+        libName = "polars_pipe";
         authors = [
           "Ritchie Vink <ritchie46@gmail.com>"
         ];
@@ -5795,6 +5964,7 @@ rec {
         version = "0.35.4";
         edition = "2021";
         sha256 = "1g2z0g0hpg05jp4n9s6m6m32adrjrdlq6zxd5c9lp1ggb04jmqqh";
+        libName = "polars_plan";
         authors = [
           "Ritchie Vink <ritchie46@gmail.com>"
         ];
@@ -5978,6 +6148,7 @@ rec {
         version = "0.35.4";
         edition = "2021";
         sha256 = "1b3d8pdxz12dzg75nqb7b2p83l15c1z4r6589sknp462ra0sndfi";
+        libName = "polars_row";
         authors = [
           "Ritchie Vink <ritchie46@gmail.com>"
         ];
@@ -6007,6 +6178,7 @@ rec {
         version = "0.35.4";
         edition = "2021";
         sha256 = "17jiflfmyymz1fdk2mrsdri2yqs1h7rqn66y3yny79a9d1wdgnxq";
+        libName = "polars_sql";
         authors = [
           "Ritchie Vink <ritchie46@gmail.com>"
         ];
@@ -6071,6 +6243,7 @@ rec {
         version = "0.35.4";
         edition = "2021";
         sha256 = "0lgyk3fpp7krbyfra51pb4fqn6m3hk5gbj61fdvn3pffx5wnzrda";
+        libName = "polars_time";
         authors = [
           "Ritchie Vink <ritchie46@gmail.com>"
         ];
@@ -6149,6 +6322,7 @@ rec {
         version = "0.35.4";
         edition = "2021";
         sha256 = "1z7v7h54p883ww64mqpn4cphzwv0fd2bgsn8b1lx8qgyd60ycv6s";
+        libName = "polars_utils";
         authors = [
           "Ritchie Vink <ritchie46@gmail.com>"
         ];
@@ -6216,6 +6390,7 @@ rec {
         version = "1.5.1";
         edition = "2018";
         sha256 = "0fxg0i7n3wmffbfn95nwi062srdg40bwkj5143w1kk6pgw7apk1v";
+        libName = "portable_atomic";
         features = {
           "critical-section" = [ "dep:critical-section" ];
           "default" = [ "fallback" ];
@@ -6228,6 +6403,7 @@ rec {
         version = "0.2.17";
         edition = "2018";
         sha256 = "1pp6g52aw970adv3x2310n7glqnji96z0a9wiamzw89ibf0ayh2v";
+        libName = "ppv_lite86";
         authors = [
           "The CryptoCorrosion Contributors"
         ];
@@ -6275,6 +6451,7 @@ rec {
         version = "1.0.69";
         edition = "2021";
         sha256 = "1nljgyllbm3yr3pa081bf83gxh6l4zvjqzaldw7v4mj9xfgihk0k";
+        libName = "proc_macro2";
         authors = [
           "David Tolnay <dtolnay@gmail.com>"
           "Alex Crichton <alex@alexcrichton.com>"
@@ -6323,6 +6500,7 @@ rec {
         version = "0.12.2";
         edition = "2021";
         sha256 = "0f57mmf6cg7f4401x9s3fgdc1idnz7i1nxxjxyzi2jbhr22d18qz";
+        libName = "prost_build";
         authors = [
           "Dan Burkert <dan@danburkert.com>"
           "Lucio Franco <luciofranco14@gmail.com>"
@@ -6415,6 +6593,7 @@ rec {
         edition = "2021";
         sha256 = "1g268fzmswaf6rx1sm8000h6a4rigd4b6zg55wysi95cvyjifmq6";
         procMacro = true;
+        libName = "prost_derive";
         authors = [
           "Dan Burkert <dan@danburkert.com>"
           "Lucio Franco <luciofranco14@gmail.com>"
@@ -6452,6 +6631,7 @@ rec {
         version = "0.12.2";
         edition = "2021";
         sha256 = "0lls5w7yh2jxi764kd9k44dwskrr85j2fs335wg2i47m6qig6fc3";
+        libName = "prost_types";
         authors = [
           "Dan Burkert <dan@danburkert.com>"
           "Lucio Franco <luciofranco14@gmail.com"
@@ -6656,6 +6836,7 @@ rec {
         edition = "2021";
         links = "rayon-core";
         sha256 = "1vaq0q71yfvcwlmia0iqf6ixj2fibjcf2xjy92n1m1izv1mgpqsw";
+        libName = "rayon_core";
         authors = [
           "Niko Matsakis <niko@alum.mit.edu>"
           "Josh Stone <cuviper@gmail.com>"
@@ -6803,6 +6984,7 @@ rec {
         version = "0.4.3";
         edition = "2021";
         sha256 = "0gs8q9yhd3kcg4pr00ag4viqxnh5l7jpyb9fsfr8hzh451w4r02z";
+        libName = "regex_automata";
         authors = [
           "The Rust Project Developers"
           "Andrew Gallant <jamslam@gmail.com>"
@@ -6863,6 +7045,7 @@ rec {
         version = "0.8.2";
         edition = "2021";
         sha256 = "17rd2s8xbiyf6lb4aj2nfi44zqlj98g2ays8zzj2vfs743k79360";
+        libName = "regex_syntax";
         authors = [
           "The Rust Project Developers"
           "Andrew Gallant <jamslam@gmail.com>"
@@ -7343,6 +7526,7 @@ rec {
         version = "0.1.23";
         edition = "2015";
         sha256 = "0xnbk2bmyzshacjm2g1kd4zzv2y2az14bw3sjccq5qkpmsfvn9nn";
+        libName = "rustc_demangle";
         authors = [
           "Alex Crichton <alex@alexcrichton.com>"
         ];
@@ -7523,6 +7707,7 @@ rec {
         version = "0.6.3";
         edition = "2021";
         sha256 = "007zind70rd5rfsrkdcfm8vn09j8sg02phg9334kark6rdscxam9";
+        libName = "rustls_native_certs";
         dependencies = [
           {
             name = "openssl-probe";
@@ -7551,6 +7736,7 @@ rec {
         version = "1.0.4";
         edition = "2018";
         sha256 = "1324n5bcns0rnw6vywr5agff3rwfvzphi7rmbyzwnv6glkhclx0w";
+        libName = "rustls_pemfile";
         dependencies = [
           {
             name = "base64";
@@ -7645,6 +7831,7 @@ rec {
         version = "2.9.2";
         edition = "2021";
         sha256 = "1pplxk15s5yxvi2m1sz5xfmjibp96cscdcl432w9jzbk0frlzdh5";
+        libName = "security_framework";
         authors = [
           "Steven Fackler <sfackler@gmail.com>"
           "Kornel <kornel@geekhood.net>"
@@ -7691,6 +7878,7 @@ rec {
         version = "2.9.1";
         edition = "2021";
         sha256 = "0yhciwlsy9dh0ps1gw3197kvyqx1bvc4knrhiznhid6kax196cp9";
+        libName = "security_framework_sys";
         authors = [
           "Steven Fackler <sfackler@gmail.com>"
           "Kornel <kornel@geekhood.net>"
@@ -7736,6 +7924,7 @@ rec {
         edition = "2018";
         sha256 = "1d50kbaslrrd0374ivx15jg57f03y5xzil1wd2ajlvajzlkbzw53";
         procMacro = true;
+        libName = "seq_macro";
         authors = [
           "David Tolnay <dtolnay@gmail.com>"
         ];
@@ -7940,6 +8129,7 @@ rec {
         version = "0.6.3";
         edition = "2018";
         sha256 = "0kp480744vkwg3fqx98379nsdw1lzzzimd88v0qgpqqic03afyzj";
+        libName = "sha2_asm";
         authors = [
           "RustCrypto Developers"
         ];
@@ -7953,12 +8143,16 @@ rec {
       };
       "shlex" = rec {
         crateName = "shlex";
-        version = "1.2.0";
+        version = "1.3.0";
         edition = "2015";
-        sha256 = "1033pj9dyb76nm5yv597nnvj3zpvr2aw9rm5wy0gah3dk99f1km7";
+        sha256 = "0r1y6bv26c1scpxvhg2cabimrmwgbp4p3wy6syj9n0c4s3q2znhg";
         authors = [
           "comex <comexk@gmail.com>"
           "Fenhl <fenhl@fenhl.net>"
+          "Adrian Taylor <adetaylor@chromium.org>"
+          "Alex Touchet <alextouchet@outlook.com>"
+          "Daniel Parks <dp+git@oxidized.org>"
+          "Garrett Berg <googberg@gmail.com>"
         ];
         features = {
           "default" = [ "std" ];
@@ -7970,6 +8164,7 @@ rec {
         version = "1.4.1";
         edition = "2015";
         sha256 = "18crkkw5k82bvcx088xlf5g4n3772m24qhzgfan80nda7d3rn8nq";
+        libName = "signal_hook_registry";
         authors = [
           "Michal 'vorner' Vaner <vorner@vorner.cz>"
           "Masaki Hara <ackie.h.gmai@gmail.com>"
@@ -8318,6 +8513,7 @@ rec {
         version = "0.1.2";
         edition = "2018";
         sha256 = "1wscqj3s30qknda778wf7z99mknk65p0h9hhs658l4pvkfqw6v5z";
+        libName = "streaming_decompression";
         dependencies = [
           {
             name = "fallible-streaming-iterator";
@@ -8331,6 +8527,7 @@ rec {
         version = "0.1.9";
         edition = "2021";
         sha256 = "0845zdv8qb7zwqzglpqc0830i43xh3fb6vqms155wz85qfvk28ib";
+        libName = "streaming_iterator";
         authors = [
           "Steven Fackler <sfackler@gmail.com>"
         ];
@@ -8527,6 +8724,7 @@ rec {
         version = "0.1.5";
         edition = "2021";
         sha256 = "1gb974chm9aj8ifkyibylxkyb5an4bf5y8dxb18pqmck698gmdfg";
+        libName = "target_features";
         authors = [
           "Caleb Zulawski <caleb.zulawski@gmail.com>"
         ];
@@ -8594,6 +8792,7 @@ rec {
         edition = "2021";
         sha256 = "1f0lmam4765sfnwr4b1n00y14vxh10g0311mkk0adr80pi02wsr6";
         procMacro = true;
+        libName = "thiserror_impl";
         authors = [
           "David Tolnay <dtolnay@gmail.com>"
         ];
@@ -8615,9 +8814,9 @@ rec {
       };
       "tokio" = rec {
         crateName = "tokio";
-        version = "1.34.0";
+        version = "1.37.0";
         edition = "2021";
-        sha256 = "1fgmssdga42a2hn9spm9dh1v9ajpcbs4r3svmzvk9s0iciv19h6h";
+        sha256 = "11v7qhvpwsf976frqgrjl1jy308bdkxq195gb38cypx7xkzypnqs";
         authors = [
           "Tokio Contributors <team@tokio.rs>"
         ];
@@ -8729,6 +8928,7 @@ rec {
         edition = "2021";
         sha256 = "0fwjy4vdx1h9pi4g2nml72wi0fr27b5m954p13ji9anyy8l1x2jv";
         procMacro = true;
+        libName = "tokio_macros";
         authors = [
           "Tokio Contributors <team@tokio.rs>"
         ];
@@ -8754,6 +8954,7 @@ rec {
         version = "0.23.4";
         edition = "2018";
         sha256 = "0nfsmmi8l1lgpbfy6079d5i13984djzcxrdr9jc06ghi0cwyhgn4";
+        libName = "tokio_rustls";
         authors = [
           "quininer kel <quininer@live.com>"
         ];
@@ -8792,6 +8993,7 @@ rec {
         version = "0.7.10";
         edition = "2021";
         sha256 = "058y6x4mf0fsqji9rfyb77qbfyc50y4pk2spqgj6xsyr693z66al";
+        libName = "tokio_util";
         authors = [
           "Tokio Contributors <team@tokio.rs>"
         ];
@@ -8854,6 +9056,7 @@ rec {
         version = "0.3.2";
         edition = "2018";
         sha256 = "0lmfzmmvid2yp2l36mbavhmqgsvzqf7r2wiwz73ml4xmwaf1rg5n";
+        libName = "tower_service";
         authors = [
           "Tower Maintainers <team@tower-rs.com>"
         ];
@@ -8874,6 +9077,11 @@ rec {
             packageId = "pin-project-lite";
           }
           {
+            name = "tracing-attributes";
+            packageId = "tracing-attributes";
+            optional = true;
+          }
+          {
             name = "tracing-core";
             packageId = "tracing-core";
             usesDefaultFeatures = false;
@@ -8888,13 +9096,44 @@ rec {
           "tracing-attributes" = [ "dep:tracing-attributes" ];
           "valuable" = [ "tracing-core/valuable" ];
         };
-        resolvedDefaultFeatures = [ "std" ];
+        resolvedDefaultFeatures = [ "attributes" "default" "std" "tracing-attributes" ];
+      };
+      "tracing-attributes" = rec {
+        crateName = "tracing-attributes";
+        version = "0.1.27";
+        edition = "2018";
+        sha256 = "1rvb5dn9z6d0xdj14r403z0af0bbaqhg02hq4jc97g5wds6lqw1l";
+        procMacro = true;
+        libName = "tracing_attributes";
+        authors = [
+          "Tokio Contributors <team@tokio.rs>"
+          "Eliza Weisman <eliza@buoyant.io>"
+          "David Barsky <dbarsky@amazon.com>"
+        ];
+        dependencies = [
+          {
+            name = "proc-macro2";
+            packageId = "proc-macro2";
+          }
+          {
+            name = "quote";
+            packageId = "quote";
+          }
+          {
+            name = "syn";
+            packageId = "syn 2.0.39";
+            usesDefaultFeatures = false;
+            features = [ "full" "parsing" "printing" "visit-mut" "clone-impls" "extra-traits" "proc-macro" ];
+          }
+        ];
+        features = { };
       };
       "tracing-core" = rec {
         crateName = "tracing-core";
         version = "0.1.32";
         edition = "2018";
         sha256 = "0m5aglin3cdwxpvbg6kz0r9r0k31j48n0kcfwsp6l49z26k3svf0";
+        libName = "tracing_core";
         authors = [
           "Tokio Contributors <team@tokio.rs>"
         ];
@@ -8918,6 +9157,7 @@ rec {
         version = "0.2.4";
         edition = "2015";
         sha256 = "1vc15paa4zi06ixsxihwbvfn24d708nsyg1ncgqwcrn42byyqa1m";
+        libName = "try_lock";
         authors = [
           "Sean McArthur <sean@seanmonstar.com>"
         ];
@@ -8943,6 +9183,7 @@ rec {
         version = "1.0.12";
         edition = "2018";
         sha256 = "0jzf1znfpb2gx8nr8mvmyqs1crnv79l57nxnbiszc7xf7ynbjm1k";
+        libName = "unicode_ident";
         authors = [
           "David Tolnay <dtolnay@gmail.com>"
         ];
@@ -8953,6 +9194,7 @@ rec {
         version = "0.1.11";
         edition = "2015";
         sha256 = "11ds4ydhg8g7l06rlmh712q41qsrd0j0h00n1jm74kww3kqk65z5";
+        libName = "unicode_width";
         authors = [
           "kwantam <kwantam@gmail.com>"
           "Manish Goregaokar <manishsmail@gmail.com>"
@@ -9046,6 +9288,7 @@ rec {
         version = "0.2.88";
         edition = "2018";
         sha256 = "1khgsh4z9bga35mjhg41dl7523i69ffc5m8ckhqaw6ssyabc5bkx";
+        libName = "wasm_bindgen";
         authors = [
           "The wasm-bindgen Developers"
         ];
@@ -9077,6 +9320,7 @@ rec {
         version = "0.2.88";
         edition = "2018";
         sha256 = "05zj8yl243rvs87rhicq2l1d6443lnm6k90khf744khf9ikg95z3";
+        libName = "wasm_bindgen_backend";
         authors = [
           "The wasm-bindgen Developers"
         ];
@@ -9122,6 +9366,7 @@ rec {
         edition = "2018";
         sha256 = "1chn3wgw9awmvs0fpmazbqyc5rwfgy3pj7lzwczmzb887dxh2qar";
         procMacro = true;
+        libName = "wasm_bindgen_macro";
         authors = [
           "The wasm-bindgen Developers"
         ];
@@ -9146,6 +9391,7 @@ rec {
         version = "0.2.88";
         edition = "2018";
         sha256 = "01rrzg3y1apqygsjz1jg0n7p831nm4kdyxmxyl85x7v6mf6kndf5";
+        libName = "wasm_bindgen_macro_support";
         authors = [
           "The wasm-bindgen Developers"
         ];
@@ -9184,6 +9430,7 @@ rec {
         edition = "2018";
         links = "wasm_bindgen";
         sha256 = "02vmw2rzsla1qm0zgfng4kqz52xn8k54v8ads4g1macv09fnq10d";
+        libName = "wasm_bindgen_shared";
         authors = [
           "The wasm-bindgen Developers"
         ];
@@ -9194,6 +9441,7 @@ rec {
         version = "0.3.65";
         edition = "2018";
         sha256 = "11ba406ca9qssc21c37v49sn2y2gsdn6c3nva4hjf8v3yv2rkd2x";
+        libName = "web_sys";
         authors = [
           "The wasm-bindgen Developers"
         ];
@@ -9729,12 +9977,12 @@ rec {
           {
             name = "winapi-i686-pc-windows-gnu";
             packageId = "winapi-i686-pc-windows-gnu";
-            target = { target, features }: (pkgs.rust.lib.toRustTarget stdenv.hostPlatform == "i686-pc-windows-gnu");
+            target = { target, features }: (stdenv.hostPlatform.rust.rustcTarget == "i686-pc-windows-gnu");
           }
           {
             name = "winapi-x86_64-pc-windows-gnu";
             packageId = "winapi-x86_64-pc-windows-gnu";
-            target = { target, features }: (pkgs.rust.lib.toRustTarget stdenv.hostPlatform == "x86_64-pc-windows-gnu");
+            target = { target, features }: (stdenv.hostPlatform.rust.rustcTarget == "x86_64-pc-windows-gnu");
           }
         ];
         features = {
@@ -9747,6 +9995,7 @@ rec {
         version = "0.4.0";
         edition = "2015";
         sha256 = "1dmpa6mvcvzz16zg6d5vrfy4bxgg541wxrcip7cnshi06v38ffxc";
+        libName = "winapi_i686_pc_windows_gnu";
         authors = [
           "Peter Atashian <retep998@gmail.com>"
         ];
@@ -9757,6 +10006,7 @@ rec {
         version = "0.4.0";
         edition = "2015";
         sha256 = "0gqq64czqb64kskjryj8isp62m2sgvx25yyj3kpc2myh85w24bki";
+        libName = "winapi_x86_64_pc_windows_gnu";
         authors = [
           "Peter Atashian <retep998@gmail.com>"
         ];
@@ -9767,6 +10017,7 @@ rec {
         version = "0.51.1";
         edition = "2021";
         sha256 = "0r1f57hsshsghjyc7ypp2s0i78f7b1vr93w68sdb8baxyf2czy7i";
+        libName = "windows_core";
         authors = [
           "Microsoft"
         ];
@@ -9784,6 +10035,7 @@ rec {
         version = "0.45.0";
         edition = "2018";
         sha256 = "1l36bcqm4g89pknfp8r9rl1w4bn017q6a8qlx8viv0xjxzjkna3m";
+        libName = "windows_sys";
         authors = [
           "Microsoft"
         ];
@@ -10070,6 +10322,7 @@ rec {
         version = "0.48.0";
         edition = "2018";
         sha256 = "1aan23v5gs7gya1lc46hqn9mdh8yph3fhxmhxlw36pn6pqc28zb7";
+        libName = "windows_sys";
         authors = [
           "Microsoft"
         ];
@@ -10363,6 +10616,7 @@ rec {
         version = "0.52.0";
         edition = "2021";
         sha256 = "0gd3v4ji88490zgb6b5mq5zgbvwv7zx1ibn8v3x83rwcdbryaar8";
+        libName = "windows_sys";
         authors = [
           "Microsoft"
         ];
@@ -10610,6 +10864,7 @@ rec {
         version = "0.42.2";
         edition = "2018";
         sha256 = "0wfhnib2fisxlx8c507dbmh97kgij4r6kcxdi0f9nk6l1k080lcf";
+        libName = "windows_targets";
         authors = [
           "Microsoft"
         ];
@@ -10617,62 +10872,62 @@ rec {
           {
             name = "windows_aarch64_gnullvm";
             packageId = "windows_aarch64_gnullvm 0.42.2";
-            target = { target, features }: (pkgs.rust.lib.toRustTarget stdenv.hostPlatform == "aarch64-pc-windows-gnullvm");
+            target = { target, features }: (stdenv.hostPlatform.rust.rustcTarget == "aarch64-pc-windows-gnullvm");
           }
           {
             name = "windows_aarch64_msvc";
             packageId = "windows_aarch64_msvc 0.42.2";
-            target = { target, features }: (pkgs.rust.lib.toRustTarget stdenv.hostPlatform == "aarch64-pc-windows-msvc");
+            target = { target, features }: (stdenv.hostPlatform.rust.rustcTarget == "aarch64-pc-windows-msvc");
           }
           {
             name = "windows_aarch64_msvc";
             packageId = "windows_aarch64_msvc 0.42.2";
-            target = { target, features }: (pkgs.rust.lib.toRustTarget stdenv.hostPlatform == "aarch64-uwp-windows-msvc");
+            target = { target, features }: (stdenv.hostPlatform.rust.rustcTarget == "aarch64-uwp-windows-msvc");
           }
           {
             name = "windows_i686_gnu";
             packageId = "windows_i686_gnu 0.42.2";
-            target = { target, features }: (pkgs.rust.lib.toRustTarget stdenv.hostPlatform == "i686-pc-windows-gnu");
+            target = { target, features }: (stdenv.hostPlatform.rust.rustcTarget == "i686-pc-windows-gnu");
           }
           {
             name = "windows_i686_gnu";
             packageId = "windows_i686_gnu 0.42.2";
-            target = { target, features }: (pkgs.rust.lib.toRustTarget stdenv.hostPlatform == "i686-uwp-windows-gnu");
+            target = { target, features }: (stdenv.hostPlatform.rust.rustcTarget == "i686-uwp-windows-gnu");
           }
           {
             name = "windows_i686_msvc";
             packageId = "windows_i686_msvc 0.42.2";
-            target = { target, features }: (pkgs.rust.lib.toRustTarget stdenv.hostPlatform == "i686-pc-windows-msvc");
+            target = { target, features }: (stdenv.hostPlatform.rust.rustcTarget == "i686-pc-windows-msvc");
           }
           {
             name = "windows_i686_msvc";
             packageId = "windows_i686_msvc 0.42.2";
-            target = { target, features }: (pkgs.rust.lib.toRustTarget stdenv.hostPlatform == "i686-uwp-windows-msvc");
+            target = { target, features }: (stdenv.hostPlatform.rust.rustcTarget == "i686-uwp-windows-msvc");
           }
           {
             name = "windows_x86_64_gnu";
             packageId = "windows_x86_64_gnu 0.42.2";
-            target = { target, features }: (pkgs.rust.lib.toRustTarget stdenv.hostPlatform == "x86_64-pc-windows-gnu");
+            target = { target, features }: (stdenv.hostPlatform.rust.rustcTarget == "x86_64-pc-windows-gnu");
           }
           {
             name = "windows_x86_64_gnu";
             packageId = "windows_x86_64_gnu 0.42.2";
-            target = { target, features }: (pkgs.rust.lib.toRustTarget stdenv.hostPlatform == "x86_64-uwp-windows-gnu");
+            target = { target, features }: (stdenv.hostPlatform.rust.rustcTarget == "x86_64-uwp-windows-gnu");
           }
           {
             name = "windows_x86_64_gnullvm";
             packageId = "windows_x86_64_gnullvm 0.42.2";
-            target = { target, features }: (pkgs.rust.lib.toRustTarget stdenv.hostPlatform == "x86_64-pc-windows-gnullvm");
+            target = { target, features }: (stdenv.hostPlatform.rust.rustcTarget == "x86_64-pc-windows-gnullvm");
           }
           {
             name = "windows_x86_64_msvc";
             packageId = "windows_x86_64_msvc 0.42.2";
-            target = { target, features }: (pkgs.rust.lib.toRustTarget stdenv.hostPlatform == "x86_64-pc-windows-msvc");
+            target = { target, features }: (stdenv.hostPlatform.rust.rustcTarget == "x86_64-pc-windows-msvc");
           }
           {
             name = "windows_x86_64_msvc";
             packageId = "windows_x86_64_msvc 0.42.2";
-            target = { target, features }: (pkgs.rust.lib.toRustTarget stdenv.hostPlatform == "x86_64-uwp-windows-msvc");
+            target = { target, features }: (stdenv.hostPlatform.rust.rustcTarget == "x86_64-uwp-windows-msvc");
           }
         ];
 
@@ -10682,6 +10937,7 @@ rec {
         version = "0.48.5";
         edition = "2018";
         sha256 = "034ljxqshifs1lan89xwpcy1hp0lhdh4b5n0d2z4fwjx2piacbws";
+        libName = "windows_targets";
         authors = [
           "Microsoft"
         ];
@@ -10689,7 +10945,7 @@ rec {
           {
             name = "windows_aarch64_gnullvm";
             packageId = "windows_aarch64_gnullvm 0.48.5";
-            target = { target, features }: (pkgs.rust.lib.toRustTarget stdenv.hostPlatform == "aarch64-pc-windows-gnullvm");
+            target = { target, features }: (stdenv.hostPlatform.rust.rustcTarget == "aarch64-pc-windows-gnullvm");
           }
           {
             name = "windows_aarch64_msvc";
@@ -10714,7 +10970,7 @@ rec {
           {
             name = "windows_x86_64_gnullvm";
             packageId = "windows_x86_64_gnullvm 0.48.5";
-            target = { target, features }: (pkgs.rust.lib.toRustTarget stdenv.hostPlatform == "x86_64-pc-windows-gnullvm");
+            target = { target, features }: (stdenv.hostPlatform.rust.rustcTarget == "x86_64-pc-windows-gnullvm");
           }
           {
             name = "windows_x86_64_msvc";
@@ -10729,6 +10985,7 @@ rec {
         version = "0.52.0";
         edition = "2021";
         sha256 = "1kg7a27ynzw8zz3krdgy6w5gbqcji27j1sz4p7xk2j5j8082064a";
+        libName = "windows_targets";
         authors = [
           "Microsoft"
         ];
@@ -10736,7 +10993,7 @@ rec {
           {
             name = "windows_aarch64_gnullvm";
             packageId = "windows_aarch64_gnullvm 0.52.0";
-            target = { target, features }: (pkgs.rust.lib.toRustTarget stdenv.hostPlatform == "aarch64-pc-windows-gnullvm");
+            target = { target, features }: (stdenv.hostPlatform.rust.rustcTarget == "aarch64-pc-windows-gnullvm");
           }
           {
             name = "windows_aarch64_msvc";
@@ -10761,7 +11018,7 @@ rec {
           {
             name = "windows_x86_64_gnullvm";
             packageId = "windows_x86_64_gnullvm 0.52.0";
-            target = { target, features }: (pkgs.rust.lib.toRustTarget stdenv.hostPlatform == "x86_64-pc-windows-gnullvm");
+            target = { target, features }: (stdenv.hostPlatform.rust.rustcTarget == "x86_64-pc-windows-gnullvm");
           }
           {
             name = "windows_x86_64_msvc";
@@ -10998,6 +11255,7 @@ rec {
         version = "0.8.7";
         edition = "2018";
         sha256 = "0yz037yrkn0qa0g0r6733ynd1xbw7zvx58v6qylhyi2kv9wb2a4q";
+        libName = "xxhash_rust";
         authors = [
           "Douman <douman@gmx.se>"
         ];
@@ -11027,9 +11285,9 @@ rec {
       };
       "zerocopy" = rec {
         crateName = "zerocopy";
-        version = "0.7.26";
+        version = "0.7.35";
         edition = "2018";
-        sha256 = "184s51bzn8mpx3p9h6klrlppv9b7ja1b8y9998jr36jmj1a42zp9";
+        sha256 = "1w36q7b9il2flg0qskapgi9ymgg7p985vniqd09vi0mwib8lz6qv";
         authors = [
           "Joshua Liebow-Feeser <joshlf@google.com>"
         ];
@@ -11063,10 +11321,11 @@ rec {
       };
       "zerocopy-derive" = rec {
         crateName = "zerocopy-derive";
-        version = "0.7.26";
+        version = "0.7.35";
         edition = "2018";
-        sha256 = "17v8yshfa23z5az2xhclpwrlzih26nj7imwbra12i5b6y764hznx";
+        sha256 = "0gnf2ap2y92nwdalzz3x7142f2b83sni66l39vxp2ijd6j080kzs";
         procMacro = true;
+        libName = "zerocopy_derive";
         authors = [
           "Joshua Liebow-Feeser <joshlf@google.com>"
         ];
@@ -11141,6 +11400,7 @@ rec {
         version = "7.0.0";
         edition = "2018";
         sha256 = "0gpav2lcibrpmyslmjkcn3w0w64qif3jjljd2h8lr4p249s7qx23";
+        libName = "zstd_safe";
         authors = [
           "Alexandre Bury <alexandre.bury@gmail.com>"
         ];
@@ -11174,6 +11434,7 @@ rec {
         edition = "2018";
         links = "zstd";
         sha256 = "0mk6a2367swdi22zg03lcackpnvgq96d7120awd4i83lm2lfy5ly";
+        libName = "zstd_sys";
         authors = [
           "Alexandre Bury <alexandre.bury@gmail.com>"
         ];
@@ -11209,14 +11470,11 @@ rec {
       fuchsia = true;
       test = false;
 
-      /* We are choosing an arbitrary rust version to grab `lib` from,
-      which is unfortunate, but `lib` has been version-agnostic the
-      whole time so this is good enough for now.
-      */
-      os = pkgs.rust.lib.toTargetOs platform;
-      arch = pkgs.rust.lib.toTargetArch platform;
-      family = pkgs.rust.lib.toTargetFamily platform;
-      vendor = pkgs.rust.lib.toTargetVendor platform;
+      inherit (platform.rust.platform)
+        arch
+        os
+        vendor;
+      family = platform.rust.platform.target-family;
       env = "gnu";
       endian =
         if platform.parsed.cpu.significantByte.name == "littleEndian"
@@ -11293,6 +11551,7 @@ rec {
               (
                 _: {
                   buildTests = true;
+                  release = false;
                 }
               );
             # If the user hasn't set any pre/post commands, we don't want to
@@ -11305,51 +11564,41 @@ rec {
                 testPostRun
               ]);
           in
-          pkgs.runCommand "run-tests-${testCrate.name}"
-            {
-              inherit testCrateFlags;
-              buildInputs = testInputs;
-            } ''
-            set -e
-
-            export RUST_BACKTRACE=1
-
-            # recreate a file hierarchy as when running tests with cargo
-
-            # the source for test data
-            # It's necessary to locate the source in $NIX_BUILD_TOP/source/
-            # instead of $NIX_BUILD_TOP/
-            # because we compiled those test binaries in the former and not the latter.
-            # So all paths will expect source tree to be there and not in the build top directly.
-            # For example: $NIX_BUILD_TOP := /build in general, if you ask yourself.
-            # TODO(raitobezarius): I believe there could be more edge cases if `crate.sourceRoot`
-            # do exist but it's very hard to reason about them, so let's wait until the first bug report.
-            mkdir -p source/
-            cd source/
-
-            ${pkgs.buildPackages.xorg.lndir}/bin/lndir ${crate.src}
-
-            # build outputs
-            testRoot=target/debug
-            mkdir -p $testRoot
-
-            # executables of the crate
-            # we copy to prevent std::env::current_exe() to resolve to a store location
-            for i in ${crate}/bin/*; do
-              cp "$i" "$testRoot"
-            done
-            chmod +w -R .
-
-            # test harness executables are suffixed with a hash, like cargo does
-            # this allows to prevent name collision with the main
-            # executables of the crate
-            hash=$(basename $out)
-            for file in ${drv}/tests/*; do
-              f=$testRoot/$(basename $file)-$hash
-              cp $file $f
-              ${testCommand}
-            done
-          '';
+          pkgs.stdenvNoCC.mkDerivation {
+            name = "run-tests-${testCrate.name}";
+
+            inherit (crate) src;
+
+            inherit testCrateFlags;
+
+            buildInputs = testInputs;
+
+            buildPhase = ''
+              set -e
+              export RUST_BACKTRACE=1
+
+              # build outputs
+              testRoot=target/debug
+              mkdir -p $testRoot
+
+              # executables of the crate
+              # we copy to prevent std::env::current_exe() from resolving to a store location
+              for i in ${crate}/bin/*; do
+                cp "$i" "$testRoot"
+              done
+              chmod +w -R .
+
+              # test harness executables are suffixed with a hash, like cargo does
+              # this prevents name collisions with the main
+              # executables of the crate
+              hash=$(basename $out)
+              for file in ${drv}/tests/*; do
+                f=$testRoot/$(basename $file)-$hash
+                cp $file $f
+                ${testCommand}
+              done
+            '';
+          };
       in
       pkgs.runCommand "${crate.name}-linked"
         {
@@ -11458,7 +11707,7 @@ rec {
             let
               self = {
                 crates = lib.mapAttrs (packageId: value: buildByPackageIdForPkgsImpl self pkgs packageId) crateConfigs;
-                target = makeTarget pkgs.stdenv.hostPlatform;
+                target = makeTarget stdenv.hostPlatform;
                 build = mkBuiltByPackageIdByPkgs pkgs.buildPackages;
               };
             in
@@ -11533,8 +11782,6 @@ rec {
             buildRustCrateForPkgsFunc pkgs
               (
                 crateConfig // {
-                  # https://github.com/NixOS/nixpkgs/issues/218712
-                  dontStrip = stdenv.hostPlatform.isDarwin;
                   src = crateConfig.src or (
                     pkgs.fetchurl rec {
                       name = "${crateConfig.crateName}-${crateConfig.version}.tar.gz";
diff --git a/tvix/tools/crunch-v2/Cargo.toml b/tvix/tools/crunch-v2/Cargo.toml
index d2b7126bd241..4421c8b9ab34 100644
--- a/tvix/tools/crunch-v2/Cargo.toml
+++ b/tvix/tools/crunch-v2/Cargo.toml
@@ -11,7 +11,7 @@ anyhow = { version = "1.0.75", features = ["backtrace"] }
 lazy_static = "1.4.0"
 
 bstr = "1.8.0"
-bytes = "1.5.0"
+bytes = "1.6.1"
 
 futures = "0.3.29"
 tokio = { version = "1.37.0", features = ["full"] }
diff --git a/tvix/tools/crunch-v2/default.nix b/tvix/tools/crunch-v2/default.nix
index 689e86be6559..8014aa9443c8 100644
--- a/tvix/tools/crunch-v2/default.nix
+++ b/tvix/tools/crunch-v2/default.nix
@@ -1,15 +1,15 @@
-{ pkgs, ... }:
+{ pkgs, depot, lib, ... }:
 
-let
-  crates = import ./Cargo.nix {
-    inherit pkgs;
-    nixpkgs = pkgs.path;
-
-    defaultCrateOverrides = pkgs.defaultCrateOverrides // {
-      crunch-v2 = prev: {
-        nativeBuildInputs = (prev.nativeBuildInputs or [ ]) ++ [ pkgs.buildPackages.protobuf ];
+(pkgs.callPackage ./Cargo.nix {
+  defaultCrateOverrides = (depot.tvix.utils.defaultCrateOverridesForPkgs pkgs) // {
+    crunch-v2 = prev: {
+      src = depot.tvix.utils.filterRustCrateSrc rec {
+        root = prev.src.origSrc;
+        extraFileset = lib.fileset.fileFilter (f: f.hasExt "proto") root;
       };
+      nativeBuildInputs = [ pkgs.protobuf ];
     };
   };
-in
-crates.rootCrate.build
+}).rootCrate.build.overrideAttrs {
+  meta.ci.extraSteps.crate2nix-check = depot.tvix.utils.mkCrate2nixCheck ./Cargo.nix;
+}
diff --git a/tvix/tools/narinfo2parquet/Cargo.lock b/tvix/tools/narinfo2parquet/Cargo.lock
index 070a4685105a..151ce0b47f44 100644
--- a/tvix/tools/narinfo2parquet/Cargo.lock
+++ b/tvix/tools/narinfo2parquet/Cargo.lock
@@ -271,9 +271,9 @@ dependencies = [
 
 [[package]]
 name = "bytes"
-version = "1.5.0"
+version = "1.6.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a2bd12c1caf447e69cd4528f47f94d203fd2582878ecb9e9465484c4148a8223"
+checksum = "a12916984aab3fa6e39d655a33e09c0071eb36d6ab3aea5c2d78551f1df6d952"
 
 [[package]]
 name = "cc"
@@ -398,16 +398,15 @@ dependencies = [
 
 [[package]]
 name = "curve25519-dalek"
-version = "4.1.1"
+version = "4.1.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e89b8c6a2e4b1f45971ad09761aafb85514a84744b67a95e32c3cc1352d1f65c"
+checksum = "97fb8b7c4503de7d6ae7b42ab72a5a59857b4c937ec27a3d4539dba95b5ab2be"
 dependencies = [
  "cfg-if",
  "cpufeatures",
  "curve25519-dalek-derive",
  "digest",
  "fiat-crypto",
- "platforms",
  "rustc_version",
  "subtle",
  "zeroize",
@@ -426,9 +425,9 @@ dependencies = [
 
 [[package]]
 name = "data-encoding"
-version = "2.4.0"
+version = "2.6.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c2e66c9d817f1720209181c316d28635c050fa304f9c79e47a520882661b7308"
+checksum = "e8566979429cf69b49a5c740c60791108e86440e8be149bbea4fe54d2c32d6e2"
 
 [[package]]
 name = "der"
@@ -817,6 +816,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "4ec2a862134d2a7d32d7983ddcdd1c4923530833c9f2ea1a44fc5fa473989058"
 
 [[package]]
+name = "libmimalloc-sys"
+version = "0.1.39"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "23aa6811d3bd4deb8a84dde645f943476d13b248d818edcf8ce0b2f37f036b44"
+dependencies = [
+ "cc",
+ "libc",
+]
+
+[[package]]
 name = "linux-raw-sys"
 version = "0.4.10"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -873,6 +882,15 @@ dependencies = [
 ]
 
 [[package]]
+name = "mimalloc"
+version = "0.1.43"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "68914350ae34959d83f732418d51e2427a794055d0b9529f48259ac07af65633"
+dependencies = [
+ "libmimalloc-sys",
+]
+
+[[package]]
 name = "minimal-lexical"
 version = "0.2.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -938,11 +956,13 @@ version = "0.1.0"
 dependencies = [
  "bitflags 2.4.1",
  "bstr",
+ "bytes",
  "data-encoding",
  "ed25519",
  "ed25519-dalek",
  "enum-primitive-derive",
  "glob",
+ "mimalloc",
  "nom",
  "num-traits",
  "pin-project-lite",
@@ -951,6 +971,7 @@ dependencies = [
  "sha2",
  "thiserror",
  "tokio",
+ "tracing",
 ]
 
 [[package]]
@@ -1070,12 +1091,6 @@ dependencies = [
 ]
 
 [[package]]
-name = "platforms"
-version = "3.2.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "14e6ab3f592e6fb464fc9712d8d6e6912de6473954635fd76a589d832cffcbb0"
-
-[[package]]
 name = "polars"
 version = "0.36.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -1860,6 +1875,37 @@ dependencies = [
 ]
 
 [[package]]
+name = "tracing"
+version = "0.1.40"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef"
+dependencies = [
+ "pin-project-lite",
+ "tracing-attributes",
+ "tracing-core",
+]
+
+[[package]]
+name = "tracing-attributes"
+version = "0.1.27"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.39",
+]
+
+[[package]]
+name = "tracing-core"
+version = "0.1.32"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54"
+dependencies = [
+ "once_cell",
+]
+
+[[package]]
 name = "typenum"
 version = "1.17.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
diff --git a/tvix/tools/narinfo2parquet/Cargo.nix b/tvix/tools/narinfo2parquet/Cargo.nix
index 0ae873c8443c..9804b92a5e76 100644
--- a/tvix/tools/narinfo2parquet/Cargo.nix
+++ b/tvix/tools/narinfo2parquet/Cargo.nix
@@ -1,4 +1,4 @@
-# This file was @generated by crate2nix 0.14.0 with the command:
+# This file was @generated by crate2nix 0.14.1 with the command:
 #   "generate" "--all-features"
 # See https://github.com/kolloch/crate2nix for more info.
 
@@ -13,6 +13,8 @@
 , rootFeatures ? [ "default" ]
  # If true, throw errors instead of issuing deprecation warnings.
 , strictDeprecation ? false
+  # Elements to add to the `-C target-feature=` argument passed to `rustc`
+  # (separated by `,`, prefixed with `+`).
   # Used for conditional compilation based on CPU feature detection.
 , targetFeatures ? [ ]
   # Whether to perform release builds: longer compile times, faster binaries.
@@ -205,6 +207,7 @@ rec {
         edition = "2015";
         crateBin = [ ];
         sha256 = "1cy6r2sfv5y5cigv86vms7n5nlwhx1rbyxwcraqnmm1rxiib2yyc";
+        libName = "alloc_no_stdlib";
         authors = [
           "Daniel Reiter Horn <danielrh@dropbox.com>"
         ];
@@ -216,6 +219,7 @@ rec {
         edition = "2015";
         crateBin = [ ];
         sha256 = "1kkfbld20ab4165p29v172h8g0wvq8i06z8vnng14whw0isq5ywl";
+        libName = "alloc_stdlib";
         authors = [
           "Daniel Reiter Horn <danielrh@dropbox.com>"
         ];
@@ -232,6 +236,7 @@ rec {
         version = "0.2.16";
         edition = "2018";
         sha256 = "1iayppgq4wqbfbfcqmsbwgamj0s65012sskfvyx07pxavk3gyhh9";
+        libName = "allocator_api2";
         authors = [
           "Zakarum <zaq.dev@icloud.com>"
         ];
@@ -247,6 +252,7 @@ rec {
         version = "0.1.1";
         edition = "2018";
         sha256 = "1w7ynjxrfs97xg3qlcdns4kgfpwcdv824g611fq32cag4cdr96g9";
+        libName = "android_tzdata";
         authors = [
           "RumovZ"
         ];
@@ -318,6 +324,7 @@ rec {
         version = "0.2.0";
         edition = "2021";
         sha256 = "0xpbqf7qkvzplpjd7f0wbcf2n1v9vygdccwxkd1amxp4il0hlzdz";
+        libName = "array_init_cursor";
 
       };
       "arrow-format" = rec {
@@ -325,6 +332,7 @@ rec {
         version = "0.8.1";
         edition = "2018";
         sha256 = "1irj67p6c224dzw86jr7j3z9r5zfid52gy6ml8rdqk4r2si4x207";
+        libName = "arrow_format";
         authors = [
           "Jorge C. Leitao <jorgecarleitao@gmail.com>"
         ];
@@ -360,6 +368,7 @@ rec {
         version = "0.3.5";
         edition = "2018";
         sha256 = "0l8sjq1rylkb1ak0pdyjn83b3k6x36j22myngl4sqqgg7whdsmnd";
+        libName = "async_stream";
         authors = [
           "Carl Lerche <me@carllerche.com>"
         ];
@@ -385,6 +394,7 @@ rec {
         edition = "2018";
         sha256 = "14q179j4y8p2z1d0ic6aqgy9fhwz8p9cai1ia8kpw4bw7q12mrhn";
         procMacro = true;
+        libName = "async_stream_impl";
         authors = [
           "Carl Lerche <me@carllerche.com>"
         ];
@@ -411,6 +421,7 @@ rec {
         edition = "2021";
         sha256 = "1ydhbsqjqqa6bxbv0kgys2wq2vi3jpwjy57dk162ajwppgqkfrd6";
         procMacro = true;
+        libName = "async_trait";
         authors = [
           "David Tolnay <dtolnay@gmail.com>"
         ];
@@ -603,6 +614,7 @@ rec {
         version = "0.10.4";
         edition = "2018";
         sha256 = "0w9sa2ypmrsqqvc20nhwr75wbb5cjr4kkyhpjm1z1lv2kdicfy1h";
+        libName = "block_buffer";
         authors = [
           "RustCrypto Developers"
         ];
@@ -658,6 +670,7 @@ rec {
         edition = "2015";
         crateBin = [ ];
         sha256 = "0kyyh9701dwqzwvn2frff4ww0zibikqd1s1xvl7n1pfpc3z4lbjf";
+        libName = "brotli_decompressor";
         authors = [
           "Daniel Reiter Horn <danielrh@dropbox.com>"
           "The Brotli Authors"
@@ -780,9 +793,9 @@ rec {
       };
       "bytes" = rec {
         crateName = "bytes";
-        version = "1.5.0";
+        version = "1.6.1";
         edition = "2018";
-        sha256 = "08w2i8ac912l8vlvkv3q51cd4gr09pwlg3sjsjffcizlrb0i5gd2";
+        sha256 = "0lnryqfiymbq5mfflfmbsqvfnw80kkh36nk5kpiscgxb9ac1cad1";
         authors = [
           "Carl Lerche <me@carllerche.com>"
           "Sean McArthur <sean@seanmonstar.com>"
@@ -826,6 +839,7 @@ rec {
         version = "1.0.0";
         edition = "2018";
         sha256 = "1za0vb97n4brpzpv8lsbnzmq5r8f2b0cpqqr0sy8h5bn751xxwds";
+        libName = "cfg_if";
         authors = [
           "Alex Crichton <alex@alexcrichton.com>"
         ];
@@ -890,6 +904,7 @@ rec {
         version = "0.9.5";
         edition = "2021";
         sha256 = "0vxb4d25mgk8y0phay7j078limx2553716ixsr1x5605k31j5h98";
+        libName = "const_oid";
         authors = [
           "RustCrypto Developers"
         ];
@@ -902,6 +917,7 @@ rec {
         version = "0.8.4";
         edition = "2015";
         sha256 = "1yhf471qj6snnm2mcswai47vsbc9w30y4abmdp4crb4av87sb5p4";
+        libName = "core_foundation_sys";
         authors = [
           "The Servo Project Developers"
         ];
@@ -919,7 +935,7 @@ rec {
           {
             name = "libc";
             packageId = "libc";
-            target = { target, features }: (pkgs.rust.lib.toRustTarget stdenv.hostPlatform == "aarch64-linux-android");
+            target = { target, features }: (stdenv.hostPlatform.rust.rustcTarget == "aarch64-linux-android");
           }
           {
             name = "libc";
@@ -964,6 +980,7 @@ rec {
         version = "0.5.8";
         edition = "2018";
         sha256 = "004jz4wxp9k26z657i7rsh9s7586dklx2c5aqf1n3w1dgzvjng53";
+        libName = "crossbeam_channel";
         dependencies = [
           {
             name = "cfg-if";
@@ -988,6 +1005,7 @@ rec {
         version = "0.8.3";
         edition = "2018";
         sha256 = "1vqczbcild7nczh5z116w8w46z991kpjyw7qxkf24c14apwdcvyf";
+        libName = "crossbeam_deque";
         dependencies = [
           {
             name = "cfg-if";
@@ -1019,6 +1037,7 @@ rec {
         version = "0.9.15";
         edition = "2018";
         sha256 = "1ixwc3cq816wb8rlh3ix4jnybqbyyq4l61nwlx0mfm3ck0s148df";
+        libName = "crossbeam_epoch";
         dependencies = [
           {
             name = "cfg-if";
@@ -1059,6 +1078,7 @@ rec {
         version = "0.3.8";
         edition = "2018";
         sha256 = "1p9s6n4ckwdgxkb7a8ay9zjzmgc8ppfbxix2vr07rwskibmb7kyi";
+        libName = "crossbeam_queue";
         dependencies = [
           {
             name = "cfg-if";
@@ -1082,6 +1102,7 @@ rec {
         version = "0.8.16";
         edition = "2018";
         sha256 = "153j0gikblz7n7qdvdi8pslhi008s1yp9cmny6vw07ad7pbb48js";
+        libName = "crossbeam_utils";
         dependencies = [
           {
             name = "cfg-if";
@@ -1099,6 +1120,7 @@ rec {
         version = "0.1.6";
         edition = "2018";
         sha256 = "1cvby95a6xg7kxdz5ln3rl9xh66nz66w46mm3g56ri1z5x815yqv";
+        libName = "crypto_common";
         authors = [
           "RustCrypto Developers"
         ];
@@ -1121,9 +1143,10 @@ rec {
       };
       "curve25519-dalek" = rec {
         crateName = "curve25519-dalek";
-        version = "4.1.1";
+        version = "4.1.3";
         edition = "2021";
-        sha256 = "0p7ns5917k6369gajrsbfj24llc5zfm635yh3abla7sb5rm8r6z8";
+        sha256 = "1gmjb9dsknrr8lypmhkyjd67p1arb8mbfamlwxm7vph38my8pywp";
+        libName = "curve25519_dalek";
         authors = [
           "Isis Lovecruft <isis@patternsinthevoid.net>"
           "Henry de Valence <hdevalence@hdevalence.ca>"
@@ -1169,10 +1192,6 @@ rec {
         ];
         buildDependencies = [
           {
-            name = "platforms";
-            packageId = "platforms";
-          }
-          {
             name = "rustc_version";
             packageId = "rustc_version";
           }
@@ -1196,6 +1215,7 @@ rec {
         edition = "2021";
         sha256 = "1cry71xxrr0mcy5my3fb502cwfxy6822k4pm19cwrilrg7hq4s7l";
         procMacro = true;
+        libName = "curve25519_dalek_derive";
         dependencies = [
           {
             name = "proc-macro2";
@@ -1215,9 +1235,10 @@ rec {
       };
       "data-encoding" = rec {
         crateName = "data-encoding";
-        version = "2.4.0";
+        version = "2.6.0";
         edition = "2018";
-        sha256 = "023k3dk8422jgbj7k72g63x51h1mhv91dhw1j4h205vzh6fnrrn2";
+        sha256 = "1qnn68n4vragxaxlkqcb1r28d3hhj43wch67lm4rpxlw89wnjmp8";
+        libName = "data_encoding";
         authors = [
           "Julien Cretin <git@ia0.eu>"
         ];
@@ -1301,6 +1322,7 @@ rec {
         version = "1.0.16";
         edition = "2018";
         sha256 = "0pa9kas6a241pbx0q82ipwi4f7m7wwyzkkc725caky24gl4j4nsl";
+        libName = "dyn_clone";
         authors = [
           "David Tolnay <dtolnay@gmail.com>"
         ];
@@ -1343,6 +1365,7 @@ rec {
         version = "2.1.0";
         edition = "2021";
         sha256 = "1h13qm789m9gdjl6jazss80hqi8ll37m0afwcnw23zcbqjp8wqhz";
+        libName = "ed25519_dalek";
         authors = [
           "isis lovecruft <isis@patternsinthevoid.net>"
           "Tony Arcieri <bascule@gmail.com>"
@@ -1435,6 +1458,7 @@ rec {
         edition = "2018";
         sha256 = "0k6wcf58h5kh64yq5nfq71va53kaya0kzxwsjwbgwm2n2zd9axxs";
         procMacro = true;
+        libName = "enum_primitive_derive";
         authors = [
           "Doug Goldstein <cardoe@cardoe.com>"
         ];
@@ -1551,6 +1575,7 @@ rec {
         version = "0.1.9";
         edition = "2015";
         sha256 = "0nj6j26p71bjy8h42x6jahx1hn0ng6mc2miwpgwnp8vnwqf4jq3k";
+        libName = "fallible_streaming_iterator";
         authors = [
           "Steven Fackler <sfackler@gmail.com>"
         ];
@@ -1561,6 +1586,7 @@ rec {
         version = "0.2.0";
         edition = "2018";
         sha256 = "0g7kfll3xyh99kc7r352lhljnwvgayxxa6saifb6725inikmyxlm";
+        libName = "fast_float";
         authors = [
           "Ivan Smirnov <i.s.smirnov@gmail.com>"
         ];
@@ -1590,6 +1616,7 @@ rec {
         version = "0.2.5";
         edition = "2018";
         sha256 = "1dxn0g50pv0ppal779vi7k40fr55pbhkyv4in7i13pgl4sn3wmr7";
+        libName = "fiat_crypto";
         authors = [
           "Fiat Crypto library authors <jgross@mit.edu>"
         ];
@@ -1719,6 +1746,7 @@ rec {
         version = "0.3.29";
         edition = "2018";
         sha256 = "1jxsifvrbqzdadk0svbax71cba5d3qg3wgjq8i160mxmd1kdckgz";
+        libName = "futures_channel";
         dependencies = [
           {
             name = "futures-core";
@@ -1746,6 +1774,7 @@ rec {
         version = "0.3.29";
         edition = "2018";
         sha256 = "1308bpj0g36nhx2y6bl4mm6f1gnh9xyvvw2q2wpdgnb6dv3247gb";
+        libName = "futures_core";
         features = {
           "default" = [ "std" ];
           "portable-atomic" = [ "dep:portable-atomic" ];
@@ -1758,6 +1787,7 @@ rec {
         version = "0.3.29";
         edition = "2018";
         sha256 = "1g4pjni0sw28djx6mlcfz584abm2lpifz86cmng0kkxh7mlvhkqg";
+        libName = "futures_executor";
         dependencies = [
           {
             name = "futures-core";
@@ -1788,6 +1818,7 @@ rec {
         version = "0.3.29";
         edition = "2018";
         sha256 = "1ajsljgny3zfxwahba9byjzclrgvm1ypakca8z854k2w7cb4mwwb";
+        libName = "futures_io";
         features = {
           "default" = [ "std" ];
         };
@@ -1799,6 +1830,7 @@ rec {
         edition = "2018";
         sha256 = "1nwd18i8kvpkdfwm045hddjli0n96zi7pn6f99zi9c74j7ym7cak";
         procMacro = true;
+        libName = "futures_macro";
         dependencies = [
           {
             name = "proc-macro2";
@@ -1821,6 +1853,7 @@ rec {
         version = "0.3.29";
         edition = "2018";
         sha256 = "05q8jykqddxzp8nwf00wjk5m5mqi546d7i8hsxma7hiqxrw36vg3";
+        libName = "futures_sink";
         features = {
           "default" = [ "std" ];
           "std" = [ "alloc" ];
@@ -1832,6 +1865,7 @@ rec {
         version = "0.3.29";
         edition = "2018";
         sha256 = "1qmsss8rb5ppql4qvd4r70h9gpfcpd0bg2b3qilxrnhdkc397lgg";
+        libName = "futures_task";
         features = {
           "default" = [ "std" ];
           "std" = [ "alloc" ];
@@ -1843,6 +1877,7 @@ rec {
         version = "0.3.29";
         edition = "2018";
         sha256 = "0141rkqh0psj4h8x8lgsl1p29dhqr7z2wcixkcbs60z74kb2d5d1";
+        libName = "futures_util";
         dependencies = [
           {
             name = "futures-channel";
@@ -2096,6 +2131,7 @@ rec {
         version = "0.3.3";
         edition = "2021";
         sha256 = "1dyc8qsjh876n74a3rcz8h43s27nj1sypdhsn2ms61bd3b47wzyp";
+        libName = "hermit_abi";
         authors = [
           "Stefan Lankes"
         ];
@@ -2130,6 +2166,7 @@ rec {
         version = "0.1.58";
         edition = "2018";
         sha256 = "081vcr8z8ddhl5r1ywif6grnswk01b2ac4nks2bhn8zzdimvh9l3";
+        libName = "iana_time_zone";
         authors = [
           "Andrew Straw <strawman@astraw.com>"
           "Renรฉ Kijewski <rene.kijewski@fu-berlin.de>"
@@ -2175,6 +2212,7 @@ rec {
         version = "0.1.2";
         edition = "2018";
         sha256 = "17r6jmj31chn7xs9698r122mapq85mfnv98bb4pg6spm0si2f67k";
+        libName = "iana_time_zone_haiku";
         authors = [
           "Renรฉ Kijewski <crates.io@k6i.de>"
         ];
@@ -2232,6 +2270,7 @@ rec {
         edition = "2018";
         links = "jemalloc";
         sha256 = "1wpbpwhfs6wd484cdfpl0zdf441ann9wj0fypy67i8ffw531jv5c";
+        libName = "jemalloc_sys";
         authors = [
           "Alex Crichton <alex@alexcrichton.com>"
           "Gonzalo Brito Gadeschi <gonzalobg88@gmail.com>"
@@ -2314,6 +2353,7 @@ rec {
         version = "0.3.65";
         edition = "2018";
         sha256 = "1s1gaxgzpqfyygc7f2pwp9y128rh5f8zvsc4nm5yazgna9cw7h2l";
+        libName = "js_sys";
         authors = [
           "The wasm-bindgen Developers"
         ];
@@ -2355,11 +2395,39 @@ rec {
         };
         resolvedDefaultFeatures = [ "default" ];
       };
+      "libmimalloc-sys" = rec {
+        crateName = "libmimalloc-sys";
+        version = "0.1.39";
+        edition = "2018";
+        links = "mimalloc";
+        sha256 = "0i3b0dzz7cp0ik7ys66q92r16va78gwlbrnxhj5fnkdxsc8niai3";
+        libName = "libmimalloc_sys";
+        authors = [
+          "Octavian Oncescu <octavonce@gmail.com>"
+        ];
+        dependencies = [
+          {
+            name = "libc";
+            packageId = "libc";
+          }
+        ];
+        buildDependencies = [
+          {
+            name = "cc";
+            packageId = "cc";
+          }
+        ];
+        features = {
+          "cty" = [ "dep:cty" ];
+          "extended" = [ "cty" ];
+        };
+      };
       "linux-raw-sys" = rec {
         crateName = "linux-raw-sys";
         version = "0.4.10";
         edition = "2021";
         sha256 = "0gz0671d4hgrdngrryaajxl962ny4g40pykg0vq0pr32q3l7j96s";
+        libName = "linux_raw_sys";
         authors = [
           "Dan Gohman <dev@sunfishcode.online>"
         ];
@@ -2419,6 +2487,7 @@ rec {
         edition = "2015";
         links = "lz4";
         sha256 = "0059ik4xlvnss5qfh6l691psk4g3350ljxaykzv10yr0gqqppljp";
+        libName = "lz4_sys";
         authors = [
           "Jens Heyens <jens.heyens@ewetel.net>"
           "Artem V. Navrotskiy <bozaro@buzzsoft.ru>"
@@ -2495,11 +2564,40 @@ rec {
         features = { };
         resolvedDefaultFeatures = [ "default" ];
       };
+      "mimalloc" = rec {
+        crateName = "mimalloc";
+        version = "0.1.43";
+        edition = "2018";
+        sha256 = "0csnyrxc16i592gm5ffham07jyj2w98qsh9jyy1rv59lmr8474b8";
+        authors = [
+          "Octavian Oncescu <octavonce@gmail.com>"
+          "Vincent Rouillรฉ <vincent@speedy37.fr>"
+          "Thom Chiovoloni <chiovolonit@gmail.com>"
+        ];
+        dependencies = [
+          {
+            name = "libmimalloc-sys";
+            packageId = "libmimalloc-sys";
+            usesDefaultFeatures = false;
+          }
+        ];
+        features = {
+          "debug" = [ "libmimalloc-sys/debug" ];
+          "debug_in_debug" = [ "libmimalloc-sys/debug_in_debug" ];
+          "extended" = [ "libmimalloc-sys/extended" ];
+          "local_dynamic_tls" = [ "libmimalloc-sys/local_dynamic_tls" ];
+          "no_thp" = [ "libmimalloc-sys/no_thp" ];
+          "override" = [ "libmimalloc-sys/override" ];
+          "secure" = [ "libmimalloc-sys/secure" ];
+        };
+        resolvedDefaultFeatures = [ "default" ];
+      };
       "minimal-lexical" = rec {
         crateName = "minimal-lexical";
         version = "0.2.1";
         edition = "2018";
         sha256 = "16ppc5g84aijpri4jzv14rvcnslvlpphbszc7zzp6vfkddf4qdb8";
+        libName = "minimal_lexical";
         authors = [
           "Alex Huszagh <ahuszagh@gmail.com>"
         ];
@@ -2606,6 +2704,7 @@ rec {
         edition = "2021";
         sha256 = "1j1avbxw7jscyi7dmnywhlwbiny1fvg1vpp9fy4dc1pd022kva16";
         procMacro = true;
+        libName = "multiversion_macros";
         authors = [
           "Caleb Zulawski <caleb.zulawski@gmail.com>"
         ];
@@ -2644,12 +2743,7 @@ rec {
             requiredFeatures = [ ];
           }
         ];
-        # We can't filter paths with references in Nix 2.4
-        # See https://github.com/NixOS/nix/issues/5410
-        src =
-          if ((lib.versionOlder builtins.nixVersion "2.4pre20211007") || (lib.versionOlder "2.5" builtins.nixVersion))
-          then lib.cleanSourceWith { filter = sourceFilter; src = ./.; }
-          else ./.;
+        src = lib.cleanSourceWith { filter = sourceFilter; src = ./.; };
         dependencies = [
           {
             name = "anyhow";
@@ -2686,12 +2780,8 @@ rec {
         version = "0.1.0";
         edition = "2021";
         crateBin = [ ];
-        # We can't filter paths with references in Nix 2.4
-        # See https://github.com/NixOS/nix/issues/5410
-        src =
-          if ((lib.versionOlder builtins.nixVersion "2.4pre20211007") || (lib.versionOlder "2.5" builtins.nixVersion))
-          then lib.cleanSourceWith { filter = sourceFilter; src = ../../nix-compat; }
-          else ../../nix-compat;
+        src = lib.cleanSourceWith { filter = sourceFilter; src = ../../nix-compat; };
+        libName = "nix_compat";
         dependencies = [
           {
             name = "bitflags";
@@ -2703,6 +2793,11 @@ rec {
             features = [ "alloc" "unicode" "serde" ];
           }
           {
+            name = "bytes";
+            packageId = "bytes";
+            optional = true;
+          }
+          {
             name = "data-encoding";
             packageId = "data-encoding";
           }
@@ -2723,6 +2818,10 @@ rec {
             packageId = "glob";
           }
           {
+            name = "mimalloc";
+            packageId = "mimalloc";
+          }
+          {
             name = "nom";
             packageId = "nom";
           }
@@ -2758,21 +2857,30 @@ rec {
             optional = true;
             features = [ "io-util" "macros" ];
           }
+          {
+            name = "tracing";
+            packageId = "tracing";
+          }
         ];
         devDependencies = [
           {
+            name = "mimalloc";
+            packageId = "mimalloc";
+          }
+          {
             name = "serde_json";
             packageId = "serde_json";
           }
         ];
         features = {
           "async" = [ "tokio" ];
+          "bytes" = [ "dep:bytes" ];
           "default" = [ "async" "wire" ];
           "pin-project-lite" = [ "dep:pin-project-lite" ];
           "tokio" = [ "dep:tokio" ];
-          "wire" = [ "tokio" "pin-project-lite" ];
+          "wire" = [ "tokio" "pin-project-lite" "bytes" ];
         };
-        resolvedDefaultFeatures = [ "async" "default" "pin-project-lite" "tokio" "wire" ];
+        resolvedDefaultFeatures = [ "async" "bytes" "default" "pin-project-lite" "tokio" "wire" ];
       };
       "nom" = rec {
         crateName = "nom";
@@ -2844,6 +2952,7 @@ rec {
         version = "0.2.19";
         edition = "2021";
         sha256 = "0h984rhdkkqd4ny9cif7y2azl3xdfb7768hb9irhpsch4q3gq787";
+        libName = "num_traits";
         authors = [
           "The Rust Project Developers"
         ];
@@ -2943,6 +3052,7 @@ rec {
         version = "0.2.4";
         edition = "2021";
         sha256 = "07wf6wf4jrxlq5p3xldxsnabp7jl06my2qp7kiwy9m3x2r5wac8i";
+        libName = "parquet_format_safe";
         authors = [
           "Apache Thrift contributors <dev@thrift.apache.org>"
           "Jorge Leitao <jorgecarleitao@gmail.com>"
@@ -2972,6 +3082,7 @@ rec {
         version = "2.3.0";
         edition = "2018";
         sha256 = "152slflmparkh27hprw62sph8rv77wckzhwl2dhqk6bf563lfalv";
+        libName = "percent_encoding";
         authors = [
           "The rust-url developers"
         ];
@@ -2986,6 +3097,7 @@ rec {
         version = "0.2.13";
         edition = "2018";
         sha256 = "0n0bwr5qxlf0mhn2xkl36sy55118s9qmvx2yl5f3ixkb007lbywa";
+        libName = "pin_project_lite";
 
       };
       "pin-utils" = rec {
@@ -2993,6 +3105,7 @@ rec {
         version = "0.1.0";
         edition = "2018";
         sha256 = "117ir7vslsl2z1a7qzhws4pd01cg2d3338c47swjyvqv2n60v1wb";
+        libName = "pin_utils";
         authors = [
           "Josef Brandl <mail@josefbrandl.de>"
         ];
@@ -3037,6 +3150,7 @@ rec {
         version = "0.3.27";
         edition = "2015";
         sha256 = "0r39ryh1magcq4cz5g9x88jllsnxnhcqr753islvyk4jp9h2h1r6";
+        libName = "pkg_config";
         authors = [
           "Alex Crichton <alex@alexcrichton.com>"
         ];
@@ -3058,21 +3172,6 @@ rec {
         };
         resolvedDefaultFeatures = [ "default" "std" ];
       };
-      "platforms" = rec {
-        crateName = "platforms";
-        version = "3.2.0";
-        edition = "2018";
-        sha256 = "1c6bzwn877aqdbbmyqsl753ycbciwvbdh4lpzijb8vrfb4zsprhl";
-        authors = [
-          "Tony Arcieri <bascule@gmail.com>"
-          "Sergey \"Shnatsel\" Davidoff <shnatsel@gmail.com>"
-        ];
-        features = {
-          "default" = [ "std" ];
-          "serde" = [ "dep:serde" ];
-        };
-        resolvedDefaultFeatures = [ "default" "std" ];
-      };
       "polars" = rec {
         crateName = "polars";
         version = "0.36.2";
@@ -3273,6 +3372,7 @@ rec {
         version = "0.36.2";
         edition = "2021";
         sha256 = "0vxql6amvwyp6qj0w87vm21y33qa0i61pshs4ry7ixwgd4ps0s6f";
+        libName = "polars_arrow";
         authors = [
           "Jorge C. Leitao <jorgecarleitao@gmail.com>"
           "Apache Arrow <dev@arrow.apache.org>"
@@ -3334,7 +3434,7 @@ rec {
           {
             name = "getrandom";
             packageId = "getrandom";
-            target = { target, features }: (pkgs.rust.lib.toRustTarget stdenv.hostPlatform == "wasm32-unknown-unknown");
+            target = { target, features }: (stdenv.hostPlatform.rust.rustcTarget == "wasm32-unknown-unknown");
             features = [ "js" ];
           }
           {
@@ -3451,6 +3551,7 @@ rec {
         version = "0.36.2";
         edition = "2021";
         sha256 = "1pa4l1w41gdzdff81ih1z05z3gz568k81i6flibbca8v2igvqkxi";
+        libName = "polars_compute";
         authors = [
           "Ritchie Vink <ritchie46@gmail.com>"
         ];
@@ -3490,6 +3591,7 @@ rec {
         version = "0.36.2";
         edition = "2021";
         sha256 = "08sar9h97znpb8ziyvrrvvx28lyzc21vwsd7gvwybjxn6kkyzxfh";
+        libName = "polars_core";
         authors = [
           "Ritchie Vink <ritchie46@gmail.com>"
         ];
@@ -3644,6 +3746,7 @@ rec {
         version = "0.36.2";
         edition = "2021";
         sha256 = "11m80a899kp45b3dz9jbvrn5001hw8izbdp7d2czrswrixwdx5k3";
+        libName = "polars_error";
         authors = [
           "Ritchie Vink <ritchie46@gmail.com>"
         ];
@@ -3680,6 +3783,7 @@ rec {
         version = "0.36.2";
         edition = "2021";
         sha256 = "1smamd34ghlxyxdd4aqdplk7k8xm1l626brmzlc4fvwlx3pmh13x";
+        libName = "polars_io";
         authors = [
           "Ritchie Vink <ritchie46@gmail.com>"
         ];
@@ -3838,6 +3942,7 @@ rec {
         version = "0.36.2";
         edition = "2021";
         sha256 = "1mdml4hs574njb23mb9gl6x92x2bb4vdfzsazkl3ifq516s0awcx";
+        libName = "polars_lazy";
         authors = [
           "Ritchie Vink <ritchie46@gmail.com>"
         ];
@@ -4032,6 +4137,7 @@ rec {
         version = "0.36.2";
         edition = "2021";
         sha256 = "13m7dh4vpdmcah04a7kql933l7y7045f0hybbmgff4dbav2ay29f";
+        libName = "polars_ops";
         authors = [
           "Ritchie Vink <ritchie46@gmail.com>"
         ];
@@ -4183,6 +4289,7 @@ rec {
         version = "0.36.2";
         edition = "2021";
         sha256 = "0gay037sw5hcg2q93pxcfh7krcchl1px8g838d8vhjpnn5klv8kv";
+        libName = "polars_parquet";
         authors = [
           "Jorge C. Leitao <jorgecarleitao@gmail.com>"
           "Apache Arrow <dev@arrow.apache.org>"
@@ -4303,6 +4410,7 @@ rec {
         version = "0.36.2";
         edition = "2021";
         sha256 = "00217q51mnq7i57k3sax293xnwghm5hridbpgl11fffcfg8fmdyr";
+        libName = "polars_pipe";
         authors = [
           "Ritchie Vink <ritchie46@gmail.com>"
         ];
@@ -4416,6 +4524,7 @@ rec {
         version = "0.36.2";
         edition = "2021";
         sha256 = "1l5ca38v7ksq4595wdr6wsd74pdgszwivq9y8wfc6l6h4ib1fjiq";
+        libName = "polars_plan";
         authors = [
           "Ritchie Vink <ritchie46@gmail.com>"
         ];
@@ -4604,6 +4713,7 @@ rec {
         version = "0.36.2";
         edition = "2021";
         sha256 = "0by7x6jlj5dwr3f1gj49v8w65l3kpqhwhzb9qzlv6gdqrdx2ycij";
+        libName = "polars_row";
         authors = [
           "Ritchie Vink <ritchie46@gmail.com>"
         ];
@@ -4633,6 +4743,7 @@ rec {
         version = "0.36.2";
         edition = "2021";
         sha256 = "1dcmm993gycw75a6c5hxcr6h2cy6fa2r3hsbx19h9zgxvxnlq2wz";
+        libName = "polars_sql";
         authors = [
           "Ritchie Vink <ritchie46@gmail.com>"
         ];
@@ -4697,6 +4808,7 @@ rec {
         version = "0.36.2";
         edition = "2021";
         sha256 = "1l24fmpv5v1qshxfd4592z8x6bnjlwy4njhf9rcbdlbbr6gn9qny";
+        libName = "polars_time";
         authors = [
           "Ritchie Vink <ritchie46@gmail.com>"
         ];
@@ -4775,6 +4887,7 @@ rec {
         version = "0.36.2";
         edition = "2021";
         sha256 = "1nmvfqwyzbaxcw457amspz479y5vcnpalq043awxfixdfx5clx5i";
+        libName = "polars_utils";
         authors = [
           "Ritchie Vink <ritchie46@gmail.com>"
         ];
@@ -4842,6 +4955,7 @@ rec {
         version = "0.2.17";
         edition = "2018";
         sha256 = "1pp6g52aw970adv3x2310n7glqnji96z0a9wiamzw89ibf0ayh2v";
+        libName = "ppv_lite86";
         authors = [
           "The CryptoCorrosion Contributors"
         ];
@@ -4855,6 +4969,7 @@ rec {
         version = "1.0.69";
         edition = "2021";
         sha256 = "1nljgyllbm3yr3pa081bf83gxh6l4zvjqzaldw7v4mj9xfgihk0k";
+        libName = "proc_macro2";
         authors = [
           "David Tolnay <dtolnay@gmail.com>"
           "Alex Crichton <alex@alexcrichton.com>"
@@ -5056,6 +5171,7 @@ rec {
         edition = "2021";
         links = "rayon-core";
         sha256 = "1vaq0q71yfvcwlmia0iqf6ixj2fibjcf2xjy92n1m1izv1mgpqsw";
+        libName = "rayon_core";
         authors = [
           "Niko Matsakis <niko@alum.mit.edu>"
           "Josh Stone <cuviper@gmail.com>"
@@ -5153,6 +5269,7 @@ rec {
         version = "0.4.3";
         edition = "2021";
         sha256 = "0gs8q9yhd3kcg4pr00ag4viqxnh5l7jpyb9fsfr8hzh451w4r02z";
+        libName = "regex_automata";
         authors = [
           "The Rust Project Developers"
           "Andrew Gallant <jamslam@gmail.com>"
@@ -5213,6 +5330,7 @@ rec {
         version = "0.8.2";
         edition = "2021";
         sha256 = "17rd2s8xbiyf6lb4aj2nfi44zqlj98g2ays8zzj2vfs743k79360";
+        libName = "regex_syntax";
         authors = [
           "The Rust Project Developers"
           "Andrew Gallant <jamslam@gmail.com>"
@@ -5229,6 +5347,7 @@ rec {
         version = "0.1.23";
         edition = "2015";
         sha256 = "0xnbk2bmyzshacjm2g1kd4zzv2y2az14bw3sjccq5qkpmsfvn9nn";
+        libName = "rustc_demangle";
         authors = [
           "Alex Crichton <alex@alexcrichton.com>"
         ];
@@ -5420,6 +5539,7 @@ rec {
         edition = "2018";
         sha256 = "1d50kbaslrrd0374ivx15jg57f03y5xzil1wd2ajlvajzlkbzw53";
         procMacro = true;
+        libName = "seq_macro";
         authors = [
           "David Tolnay <dtolnay@gmail.com>"
         ];
@@ -5763,6 +5883,7 @@ rec {
         version = "0.1.2";
         edition = "2018";
         sha256 = "1wscqj3s30qknda778wf7z99mknk65p0h9hhs658l4pvkfqw6v5z";
+        libName = "streaming_decompression";
         dependencies = [
           {
             name = "fallible-streaming-iterator";
@@ -5776,6 +5897,7 @@ rec {
         version = "0.1.9";
         edition = "2021";
         sha256 = "0845zdv8qb7zwqzglpqc0830i43xh3fb6vqms155wz85qfvk28ib";
+        libName = "streaming_iterator";
         authors = [
           "Steven Fackler <sfackler@gmail.com>"
         ];
@@ -5962,6 +6084,7 @@ rec {
         version = "0.1.5";
         edition = "2021";
         sha256 = "1gb974chm9aj8ifkyibylxkyb5an4bf5y8dxb18pqmck698gmdfg";
+        libName = "target_features";
         authors = [
           "Caleb Zulawski <caleb.zulawski@gmail.com>"
         ];
@@ -6012,6 +6135,7 @@ rec {
         version = "0.3.4";
         edition = "2018";
         sha256 = "1xksx1l1019k9q0az9mhqsgb14w0vm88yax30iq6178s3d9yhjx7";
+        libName = "tempfile_fast";
         authors = [
           "Chris West (Faux) <git@goeswhere.com>"
         ];
@@ -6054,6 +6178,7 @@ rec {
         edition = "2021";
         sha256 = "1f0lmam4765sfnwr4b1n00y14vxh10g0311mkk0adr80pi02wsr6";
         procMacro = true;
+        libName = "thiserror_impl";
         authors = [
           "David Tolnay <dtolnay@gmail.com>"
         ];
@@ -6178,6 +6303,7 @@ rec {
         edition = "2021";
         sha256 = "0fwjy4vdx1h9pi4g2nml72wi0fr27b5m954p13ji9anyy8l1x2jv";
         procMacro = true;
+        libName = "tokio_macros";
         authors = [
           "Tokio Contributors <team@tokio.rs>"
         ];
@@ -6203,6 +6329,7 @@ rec {
         version = "0.7.10";
         edition = "2021";
         sha256 = "058y6x4mf0fsqji9rfyb77qbfyc50y4pk2spqgj6xsyr693z66al";
+        libName = "tokio_util";
         authors = [
           "Tokio Contributors <team@tokio.rs>"
         ];
@@ -6253,6 +6380,96 @@ rec {
         };
         resolvedDefaultFeatures = [ "default" "io" "io-util" ];
       };
+      "tracing" = rec {
+        crateName = "tracing";
+        version = "0.1.40";
+        edition = "2018";
+        sha256 = "1vv48dac9zgj9650pg2b4d0j3w6f3x9gbggf43scq5hrlysklln3";
+        authors = [
+          "Eliza Weisman <eliza@buoyant.io>"
+          "Tokio Contributors <team@tokio.rs>"
+        ];
+        dependencies = [
+          {
+            name = "pin-project-lite";
+            packageId = "pin-project-lite";
+          }
+          {
+            name = "tracing-attributes";
+            packageId = "tracing-attributes";
+            optional = true;
+          }
+          {
+            name = "tracing-core";
+            packageId = "tracing-core";
+            usesDefaultFeatures = false;
+          }
+        ];
+        features = {
+          "attributes" = [ "tracing-attributes" ];
+          "default" = [ "std" "attributes" ];
+          "log" = [ "dep:log" ];
+          "log-always" = [ "log" ];
+          "std" = [ "tracing-core/std" ];
+          "tracing-attributes" = [ "dep:tracing-attributes" ];
+          "valuable" = [ "tracing-core/valuable" ];
+        };
+        resolvedDefaultFeatures = [ "attributes" "default" "std" "tracing-attributes" ];
+      };
+      "tracing-attributes" = rec {
+        crateName = "tracing-attributes";
+        version = "0.1.27";
+        edition = "2018";
+        sha256 = "1rvb5dn9z6d0xdj14r403z0af0bbaqhg02hq4jc97g5wds6lqw1l";
+        procMacro = true;
+        libName = "tracing_attributes";
+        authors = [
+          "Tokio Contributors <team@tokio.rs>"
+          "Eliza Weisman <eliza@buoyant.io>"
+          "David Barsky <dbarsky@amazon.com>"
+        ];
+        dependencies = [
+          {
+            name = "proc-macro2";
+            packageId = "proc-macro2";
+          }
+          {
+            name = "quote";
+            packageId = "quote";
+          }
+          {
+            name = "syn";
+            packageId = "syn 2.0.39";
+            usesDefaultFeatures = false;
+            features = [ "full" "parsing" "printing" "visit-mut" "clone-impls" "extra-traits" "proc-macro" ];
+          }
+        ];
+        features = { };
+      };
+      "tracing-core" = rec {
+        crateName = "tracing-core";
+        version = "0.1.32";
+        edition = "2018";
+        sha256 = "0m5aglin3cdwxpvbg6kz0r9r0k31j48n0kcfwsp6l49z26k3svf0";
+        libName = "tracing_core";
+        authors = [
+          "Tokio Contributors <team@tokio.rs>"
+        ];
+        dependencies = [
+          {
+            name = "once_cell";
+            packageId = "once_cell";
+            optional = true;
+          }
+        ];
+        features = {
+          "default" = [ "std" "valuable/std" ];
+          "once_cell" = [ "dep:once_cell" ];
+          "std" = [ "once_cell" ];
+          "valuable" = [ "dep:valuable" ];
+        };
+        resolvedDefaultFeatures = [ "once_cell" "std" ];
+      };
       "typenum" = rec {
         crateName = "typenum";
         version = "1.17.0";
@@ -6273,6 +6490,7 @@ rec {
         version = "1.0.12";
         edition = "2018";
         sha256 = "0jzf1znfpb2gx8nr8mvmyqs1crnv79l57nxnbiszc7xf7ynbjm1k";
+        libName = "unicode_ident";
         authors = [
           "David Tolnay <dtolnay@gmail.com>"
         ];
@@ -6310,6 +6528,7 @@ rec {
         version = "0.2.88";
         edition = "2018";
         sha256 = "1khgsh4z9bga35mjhg41dl7523i69ffc5m8ckhqaw6ssyabc5bkx";
+        libName = "wasm_bindgen";
         authors = [
           "The wasm-bindgen Developers"
         ];
@@ -6341,6 +6560,7 @@ rec {
         version = "0.2.88";
         edition = "2018";
         sha256 = "05zj8yl243rvs87rhicq2l1d6443lnm6k90khf744khf9ikg95z3";
+        libName = "wasm_bindgen_backend";
         authors = [
           "The wasm-bindgen Developers"
         ];
@@ -6386,6 +6606,7 @@ rec {
         edition = "2018";
         sha256 = "1chn3wgw9awmvs0fpmazbqyc5rwfgy3pj7lzwczmzb887dxh2qar";
         procMacro = true;
+        libName = "wasm_bindgen_macro";
         authors = [
           "The wasm-bindgen Developers"
         ];
@@ -6410,6 +6631,7 @@ rec {
         version = "0.2.88";
         edition = "2018";
         sha256 = "01rrzg3y1apqygsjz1jg0n7p831nm4kdyxmxyl85x7v6mf6kndf5";
+        libName = "wasm_bindgen_macro_support";
         authors = [
           "The wasm-bindgen Developers"
         ];
@@ -6448,6 +6670,7 @@ rec {
         edition = "2018";
         links = "wasm_bindgen";
         sha256 = "02vmw2rzsla1qm0zgfng4kqz52xn8k54v8ads4g1macv09fnq10d";
+        libName = "wasm_bindgen_shared";
         authors = [
           "The wasm-bindgen Developers"
         ];
@@ -6465,12 +6688,12 @@ rec {
           {
             name = "winapi-i686-pc-windows-gnu";
             packageId = "winapi-i686-pc-windows-gnu";
-            target = { target, features }: (pkgs.rust.lib.toRustTarget stdenv.hostPlatform == "i686-pc-windows-gnu");
+            target = { target, features }: (stdenv.hostPlatform.rust.rustcTarget == "i686-pc-windows-gnu");
           }
           {
             name = "winapi-x86_64-pc-windows-gnu";
             packageId = "winapi-x86_64-pc-windows-gnu";
-            target = { target, features }: (pkgs.rust.lib.toRustTarget stdenv.hostPlatform == "x86_64-pc-windows-gnu");
+            target = { target, features }: (stdenv.hostPlatform.rust.rustcTarget == "x86_64-pc-windows-gnu");
           }
         ];
         features = {
@@ -6483,6 +6706,7 @@ rec {
         version = "0.4.0";
         edition = "2015";
         sha256 = "1dmpa6mvcvzz16zg6d5vrfy4bxgg541wxrcip7cnshi06v38ffxc";
+        libName = "winapi_i686_pc_windows_gnu";
         authors = [
           "Peter Atashian <retep998@gmail.com>"
         ];
@@ -6493,6 +6717,7 @@ rec {
         version = "0.4.0";
         edition = "2015";
         sha256 = "0gqq64czqb64kskjryj8isp62m2sgvx25yyj3kpc2myh85w24bki";
+        libName = "winapi_x86_64_pc_windows_gnu";
         authors = [
           "Peter Atashian <retep998@gmail.com>"
         ];
@@ -7184,6 +7409,7 @@ rec {
         version = "0.51.1";
         edition = "2021";
         sha256 = "0r1f57hsshsghjyc7ypp2s0i78f7b1vr93w68sdb8baxyf2czy7i";
+        libName = "windows_core";
         authors = [
           "Microsoft"
         ];
@@ -7201,6 +7427,7 @@ rec {
         version = "0.52.0";
         edition = "2021";
         sha256 = "1nc3qv7sy24x0nlnb32f7alzpd6f72l4p24vl65vydbyil669ark";
+        libName = "windows_core";
         authors = [
           "Microsoft"
         ];
@@ -7218,6 +7445,7 @@ rec {
         version = "0.48.0";
         edition = "2018";
         sha256 = "1aan23v5gs7gya1lc46hqn9mdh8yph3fhxmhxlw36pn6pqc28zb7";
+        libName = "windows_sys";
         authors = [
           "Microsoft"
         ];
@@ -7511,6 +7739,7 @@ rec {
         version = "0.48.5";
         edition = "2018";
         sha256 = "034ljxqshifs1lan89xwpcy1hp0lhdh4b5n0d2z4fwjx2piacbws";
+        libName = "windows_targets";
         authors = [
           "Microsoft"
         ];
@@ -7518,7 +7747,7 @@ rec {
           {
             name = "windows_aarch64_gnullvm";
             packageId = "windows_aarch64_gnullvm 0.48.5";
-            target = { target, features }: (pkgs.rust.lib.toRustTarget stdenv.hostPlatform == "aarch64-pc-windows-gnullvm");
+            target = { target, features }: (stdenv.hostPlatform.rust.rustcTarget == "aarch64-pc-windows-gnullvm");
           }
           {
             name = "windows_aarch64_msvc";
@@ -7543,7 +7772,7 @@ rec {
           {
             name = "windows_x86_64_gnullvm";
             packageId = "windows_x86_64_gnullvm 0.48.5";
-            target = { target, features }: (pkgs.rust.lib.toRustTarget stdenv.hostPlatform == "x86_64-pc-windows-gnullvm");
+            target = { target, features }: (stdenv.hostPlatform.rust.rustcTarget == "x86_64-pc-windows-gnullvm");
           }
           {
             name = "windows_x86_64_msvc";
@@ -7558,6 +7787,7 @@ rec {
         version = "0.52.0";
         edition = "2021";
         sha256 = "1kg7a27ynzw8zz3krdgy6w5gbqcji27j1sz4p7xk2j5j8082064a";
+        libName = "windows_targets";
         authors = [
           "Microsoft"
         ];
@@ -7565,7 +7795,7 @@ rec {
           {
             name = "windows_aarch64_gnullvm";
             packageId = "windows_aarch64_gnullvm 0.52.0";
-            target = { target, features }: (pkgs.rust.lib.toRustTarget stdenv.hostPlatform == "aarch64-pc-windows-gnullvm");
+            target = { target, features }: (stdenv.hostPlatform.rust.rustcTarget == "aarch64-pc-windows-gnullvm");
           }
           {
             name = "windows_aarch64_msvc";
@@ -7590,7 +7820,7 @@ rec {
           {
             name = "windows_x86_64_gnullvm";
             packageId = "windows_x86_64_gnullvm 0.52.0";
-            target = { target, features }: (pkgs.rust.lib.toRustTarget stdenv.hostPlatform == "x86_64-pc-windows-gnullvm");
+            target = { target, features }: (stdenv.hostPlatform.rust.rustcTarget == "x86_64-pc-windows-gnullvm");
           }
           {
             name = "windows_x86_64_msvc";
@@ -7745,6 +7975,7 @@ rec {
         version = "0.8.7";
         edition = "2018";
         sha256 = "0yz037yrkn0qa0g0r6733ynd1xbw7zvx58v6qylhyi2kv9wb2a4q";
+        libName = "xxhash_rust";
         authors = [
           "Douman <douman@gmx.se>"
         ];
@@ -7793,6 +8024,7 @@ rec {
         edition = "2018";
         sha256 = "0fqvglw01w3hp7xj9gdk1800x9j7v58s9w8ijiyiz2a7krb39s8m";
         procMacro = true;
+        libName = "zerocopy_derive";
         authors = [
           "Joshua Liebow-Feeser <joshlf@google.com>"
         ];
@@ -7867,6 +8099,7 @@ rec {
         version = "7.0.0";
         edition = "2018";
         sha256 = "0gpav2lcibrpmyslmjkcn3w0w64qif3jjljd2h8lr4p249s7qx23";
+        libName = "zstd_safe";
         authors = [
           "Alexandre Bury <alexandre.bury@gmail.com>"
         ];
@@ -7900,6 +8133,7 @@ rec {
         edition = "2018";
         links = "zstd";
         sha256 = "0mk6a2367swdi22zg03lcackpnvgq96d7120awd4i83lm2lfy5ly";
+        libName = "zstd_sys";
         authors = [
           "Alexandre Bury <alexandre.bury@gmail.com>"
         ];
@@ -7935,14 +8169,11 @@ rec {
       fuchsia = true;
       test = false;
 
-      /* We are choosing an arbitrary rust version to grab `lib` from,
-      which is unfortunate, but `lib` has been version-agnostic the
-      whole time so this is good enough for now.
-      */
-      os = pkgs.rust.lib.toTargetOs platform;
-      arch = pkgs.rust.lib.toTargetArch platform;
-      family = pkgs.rust.lib.toTargetFamily platform;
-      vendor = pkgs.rust.lib.toTargetVendor platform;
+      inherit (platform.rust.platform)
+        arch
+        os
+        vendor;
+      family = platform.rust.platform.target-family;
       env = "gnu";
       endian =
         if platform.parsed.cpu.significantByte.name == "littleEndian"
@@ -8032,52 +8263,41 @@ rec {
                 testPostRun
               ]);
           in
-          pkgs.runCommand "run-tests-${testCrate.name}"
-            {
-              inherit testCrateFlags;
-              buildInputs = testInputs;
-            } ''
-            set -e
+          pkgs.stdenvNoCC.mkDerivation {
+            name = "run-tests-${testCrate.name}";
 
-            export RUST_BACKTRACE=1
+            inherit (crate) src;
 
-            # recreate a file hierarchy as when running tests with cargo
+            inherit testCrateFlags;
 
-            # the source for test data
-            # It's necessary to locate the source in $NIX_BUILD_TOP/source/
-            # instead of $NIX_BUILD_TOP/
-            # because we compiled those test binaries in the former and not the latter.
-            # So all paths will expect source tree to be there and not in the build top directly.
-            # For example: $NIX_BUILD_TOP := /build in general, if you ask yourself.
-            # NOTE: There could be edge cases if `crate.sourceRoot` does exist but
-            # it's very hard to reason about them.
-            # Open a bug if you run into this!
-            mkdir -p source/
-            cd source/
+            buildInputs = testInputs;
 
-            ${pkgs.buildPackages.xorg.lndir}/bin/lndir ${crate.src}
+            buildPhase = ''
+              set -e
+              export RUST_BACKTRACE=1
 
-            # build outputs
-            testRoot=target/debug
-            mkdir -p $testRoot
+              # build outputs
+              testRoot=target/debug
+              mkdir -p $testRoot
 
-            # executables of the crate
-            # we copy to prevent std::env::current_exe() to resolve to a store location
-            for i in ${crate}/bin/*; do
-              cp "$i" "$testRoot"
-            done
-            chmod +w -R .
+              # executables of the crate
+              # we copy to prevent std::env::current_exe() to resolve to a store location
+              for i in ${crate}/bin/*; do
+                cp "$i" "$testRoot"
+              done
+              chmod +w -R .
 
-            # test harness executables are suffixed with a hash, like cargo does
-            # this allows to prevent name collision with the main
-            # executables of the crate
-            hash=$(basename $out)
-            for file in ${drv}/tests/*; do
-              f=$testRoot/$(basename $file)-$hash
-              cp $file $f
-              ${testCommand}
-            done
-          '';
+              # test harness executables are suffixed with a hash, like cargo does
+              # this allows to prevent name collision with the main
+              # executables of the crate
+              hash=$(basename $out)
+              for file in ${drv}/tests/*; do
+                f=$testRoot/$(basename $file)-$hash
+                cp $file $f
+                ${testCommand}
+              done
+            '';
+          };
       in
       pkgs.runCommand "${crate.name}-linked"
         {
@@ -8186,7 +8406,7 @@ rec {
             let
               self = {
                 crates = lib.mapAttrs (packageId: value: buildByPackageIdForPkgsImpl self pkgs packageId) crateConfigs;
-                target = makeTarget pkgs.stdenv.hostPlatform;
+                target = makeTarget stdenv.hostPlatform;
                 build = mkBuiltByPackageIdByPkgs pkgs.buildPackages;
               };
             in
@@ -8261,8 +8481,6 @@ rec {
             buildRustCrateForPkgsFunc pkgs
               (
                 crateConfig // {
-                  # https://github.com/NixOS/nixpkgs/issues/218712
-                  dontStrip = stdenv.hostPlatform.isDarwin;
                   src = crateConfig.src or (
                     pkgs.fetchurl rec {
                       name = "${crateConfig.crateName}-${crateConfig.version}.tar.gz";
diff --git a/tvix/tools/narinfo2parquet/default.nix b/tvix/tools/narinfo2parquet/default.nix
index 5a84d7f64287..dfe2b66655c8 100644
--- a/tvix/tools/narinfo2parquet/default.nix
+++ b/tvix/tools/narinfo2parquet/default.nix
@@ -1,3 +1,11 @@
-{ pkgs, ... }:
+{ pkgs, depot, ... }:
 
-(pkgs.callPackage ./Cargo.nix { }).rootCrate.build
+(pkgs.callPackage ./Cargo.nix {
+  defaultCrateOverrides = (depot.tvix.utils.defaultCrateOverridesForPkgs pkgs) // {
+    narinfo2parquet = prev: {
+      src = depot.tvix.utils.filterRustCrateSrc { root = prev.src.origSrc; };
+    };
+  };
+}).rootCrate.build.overrideAttrs {
+  meta.ci.extraSteps.crate2nix = depot.tvix.utils.mkCrate2nixCheck ./Cargo.nix;
+}
diff --git a/tvix/tools/narinfo2parquet/src/main.rs b/tvix/tools/narinfo2parquet/src/main.rs
index 3b793e02b133..ea3d39af5503 100644
--- a/tvix/tools/narinfo2parquet/src/main.rs
+++ b/tvix/tools/narinfo2parquet/src/main.rs
@@ -12,7 +12,6 @@ use jemallocator::Jemalloc;
 use nix_compat::{
     narinfo::{self, NarInfo},
     nixbase32,
-    nixhash::{CAHash, NixHash},
 };
 use polars::{io::parquet::ParquetWriter, prelude::*};
 use std::{
@@ -114,6 +113,9 @@ struct FrameBuilder {
     store_path_hash_str: StringChunkedBuilder,
     store_path_hash: BinaryChunkedBuilder,
     store_path_name: StringChunkedBuilder,
+    deriver_hash_str: StringChunkedBuilder,
+    deriver_hash: BinaryChunkedBuilder,
+    deriver_name: StringChunkedBuilder,
     nar_hash: BinaryChunkedBuilder,
     nar_size: PrimitiveChunkedBuilder<UInt64Type>,
     references: ListBinaryChunkedBuilder,
@@ -133,6 +135,9 @@ impl Default for FrameBuilder {
             store_path_hash_str: StringChunkedBuilder::new("store_path_hash_str", 0, 0),
             store_path_hash: BinaryChunkedBuilder::new("store_path_hash", 0, 0),
             store_path_name: StringChunkedBuilder::new("store_path_name", 0, 0),
+            deriver_hash_str: StringChunkedBuilder::new("deriver_hash_str", 0, 0),
+            deriver_hash: BinaryChunkedBuilder::new("deriver_hash", 0, 0),
+            deriver_name: StringChunkedBuilder::new("deriver_name", 0, 0),
             nar_hash: BinaryChunkedBuilder::new("nar_hash", 0, 0),
             nar_size: PrimitiveChunkedBuilder::new("nar_size", 0),
             references: ListBinaryChunkedBuilder::new("references", 0, 0),
@@ -159,10 +164,20 @@ impl FrameBuilder {
     fn push(&mut self, entry: &NarInfo) {
         self.store_path_hash_str
             .append_value(nixbase32::encode(entry.store_path.digest()));
-
         self.store_path_hash.append_value(entry.store_path.digest());
         self.store_path_name.append_value(entry.store_path.name());
 
+        if let Some(deriver) = &entry.deriver {
+            self.deriver_hash_str
+                .append_value(nixbase32::encode(deriver.digest()));
+            self.deriver_hash.append_value(deriver.digest());
+            self.deriver_name.append_value(deriver.name());
+        } else {
+            self.deriver_hash_str.append_null();
+            self.deriver_hash.append_null();
+            self.deriver_name.append_null();
+        }
+
         self.nar_hash.append_value(&entry.nar_hash);
         self.nar_size.append_value(entry.nar_size);
 
@@ -172,31 +187,13 @@ impl FrameBuilder {
         assert!(entry.signatures.len() <= 1);
         self.signature
             .append_option(entry.signatures.get(0).map(|sig| {
-                assert_eq!(sig.name(), "cache.nixos.org-1");
+                assert_eq!(sig.name(), &"cache.nixos.org-1");
                 sig.bytes()
             }));
 
         if let Some(ca) = &entry.ca {
-            // decompose the CAHash into algo and hash parts
-            // TODO(edef): move this into CAHash
-            let (algo, hash) = match ca {
-                CAHash::Flat(h) => match h {
-                    NixHash::Md5(h) => ("fixed:md5", &h[..]),
-                    NixHash::Sha1(h) => ("fixed:sha1", &h[..]),
-                    NixHash::Sha256(h) => ("fixed:sha256", &h[..]),
-                    NixHash::Sha512(h) => ("fixed:sha512", &h[..]),
-                },
-                CAHash::Nar(h) => match h {
-                    NixHash::Md5(h) => ("fixed:r:md5", &h[..]),
-                    NixHash::Sha1(h) => ("fixed:r:sha1", &h[..]),
-                    NixHash::Sha256(h) => ("fixed:r:sha256", &h[..]),
-                    NixHash::Sha512(h) => ("fixed:r:sha512", &h[..]),
-                },
-                CAHash::Text(h) => ("text:sha256", &h[..]),
-            };
-
-            self.ca_algo.append_value(algo);
-            self.ca_hash.append_value(hash);
+            self.ca_algo.append_value(ca.algo_str());
+            self.ca_hash.append_value(ca.hash().digest_as_bytes());
         } else {
             self.ca_algo.append_null();
             self.ca_hash.append_null();
@@ -247,6 +244,9 @@ impl FrameBuilder {
             "store_path_hash_str" => self.store_path_hash_str.finish().into_series(),
             "store_path_hash" => self.store_path_hash.finish().into_series(),
             "store_path_name" => self.store_path_name.finish().into_series(),
+            "deriver_hash_str" => self.deriver_hash_str.finish().into_series(),
+            "deriver_hash" => self.deriver_hash.finish().into_series(),
+            "deriver_name" => self.deriver_name.finish().into_series(),
             "nar_hash" => self.nar_hash.finish().into_series(),
             "nar_size" => self.nar_size.finish().into_series(),
             "references" => self.references.finish().into_series(),
diff --git a/tvix/tools/turbofetch/Cargo.lock b/tvix/tools/turbofetch/Cargo.lock
index 4d65fc40639c..c865efb47466 100644
--- a/tvix/tools/turbofetch/Cargo.lock
+++ b/tvix/tools/turbofetch/Cargo.lock
@@ -230,9 +230,9 @@ dependencies = [
 
 [[package]]
 name = "data-encoding"
-version = "2.4.0"
+version = "2.6.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c2e66c9d817f1720209181c316d28635c050fa304f9c79e47a520882661b7308"
+checksum = "e8566979429cf69b49a5c740c60791108e86440e8be149bbea4fe54d2c32d6e2"
 
 [[package]]
 name = "digest"
@@ -265,6 +265,12 @@ dependencies = [
 ]
 
 [[package]]
+name = "equivalent"
+version = "1.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5"
+
+[[package]]
 name = "fnv"
 version = "1.0.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -388,9 +394,9 @@ checksum = "6fb8d784f27acf97159b40fc4db5ecd8aa23b9ad5ef69cdd136d3bc80665f0c0"
 
 [[package]]
 name = "h2"
-version = "0.3.21"
+version = "0.3.26"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "91fc23aa11be92976ef4729127f1a74adf36d8436f7816b185d18df956790833"
+checksum = "81fe527a889e1532da5c525686d96d4c2e74cdd345badf8dfef9f6b39dd5f5e8"
 dependencies = [
  "bytes",
  "fnv",
@@ -407,9 +413,9 @@ dependencies = [
 
 [[package]]
 name = "hashbrown"
-version = "0.12.3"
+version = "0.14.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888"
+checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1"
 
 [[package]]
 name = "hermit-abi"
@@ -541,11 +547,11 @@ dependencies = [
 
 [[package]]
 name = "indexmap"
-version = "1.9.3"
+version = "2.4.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99"
+checksum = "93ead53efc7ea8ed3cfb0c79fc8023fbb782a5432b52830b6518941cebe6505c"
 dependencies = [
- "autocfg",
+ "equivalent",
  "hashbrown",
 ]
 
@@ -697,9 +703,9 @@ dependencies = [
 
 [[package]]
 name = "mio"
-version = "0.8.9"
+version = "0.8.11"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3dce281c5e46beae905d4de1870d8b1509a9142b62eedf18b443b011ca8343d0"
+checksum = "a4a650543ca06a924e8b371db273b2756685faae30f8487da1b56505a8f78b0c"
 dependencies = [
  "libc",
  "wasi",
@@ -1151,9 +1157,9 @@ dependencies = [
 
 [[package]]
 name = "shlex"
-version = "1.2.0"
+version = "1.3.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a7cee0529a6d40f580e7a5e6c495c8fbfe21b7b52795ed4bb5e62cdf92bc6380"
+checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64"
 
 [[package]]
 name = "signal-hook-registry"
diff --git a/tvix/tools/turbofetch/Cargo.nix b/tvix/tools/turbofetch/Cargo.nix
index d7263727e2dd..09c9493430d5 100644
--- a/tvix/tools/turbofetch/Cargo.nix
+++ b/tvix/tools/turbofetch/Cargo.nix
@@ -1,4 +1,4 @@
-# This file was @generated by crate2nix 0.13.0 with the command:
+# This file was @generated by crate2nix 0.14.1 with the command:
 #   "generate" "--all-features"
 # See https://github.com/kolloch/crate2nix for more info.
 
@@ -13,6 +13,8 @@
 , rootFeatures ? [ "default" ]
  # If true, throw errors instead of issuing deprecation warnings.
 , strictDeprecation ? false
+  # Elements to add to the `-C target-feature=` argument passed to `rustc`
+  # (separated by `,`, prefixed with `+`).
   # Used for conditional compilation based on CPU feature detection.
 , targetFeatures ? [ ]
   # Whether to perform release builds: longer compile times, faster binaries.
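
(A minimal sketch, not part of the generated file: per the comment above, the
list elements are joined with "," and each is prefixed with "+" when building
the `-C target-feature=` flag; the feature names below are hypothetical, and
the call shape mirrors the `rootCrate.build` invocation used elsewhere in this
commit.)

  # Hypothetical caller; with this list, rustc would receive
  # `-C target-feature=+avx2,+fma` per the comment above.
  (pkgs.callPackage ./Cargo.nix {
    targetFeatures = [ "avx2" "fma" ];
  }).rootCrate.build
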
@@ -130,6 +132,7 @@ rec {
         version = "0.1.1";
         edition = "2018";
         sha256 = "1w7ynjxrfs97xg3qlcdns4kgfpwcdv824g611fq32cag4cdr96g9";
+        libName = "android_tzdata";
         authors = [
           "RumovZ"
         ];
@@ -156,6 +159,7 @@ rec {
         version = "0.3.5";
         edition = "2018";
         sha256 = "0l8sjq1rylkb1ak0pdyjn83b3k6x36j22myngl4sqqgg7whdsmnd";
+        libName = "async_stream";
         authors = [
           "Carl Lerche <me@carllerche.com>"
         ];
@@ -181,6 +185,7 @@ rec {
         edition = "2018";
         sha256 = "14q179j4y8p2z1d0ic6aqgy9fhwz8p9cai1ia8kpw4bw7q12mrhn";
         procMacro = true;
+        libName = "async_stream_impl";
         authors = [
           "Carl Lerche <me@carllerche.com>"
         ];
@@ -207,6 +212,7 @@ rec {
         edition = "2021";
         sha256 = "1ydhbsqjqqa6bxbv0kgys2wq2vi3jpwjy57dk162ajwppgqkfrd6";
         procMacro = true;
+        libName = "async_trait";
         authors = [
           "David Tolnay <dtolnay@gmail.com>"
         ];
@@ -461,6 +467,7 @@ rec {
         version = "0.9.0";
         edition = "2018";
         sha256 = "1r4pf90s7d7lj1wdjhlnqa26vvbm6pnc33z138lxpnp9srpi2lj1";
+        libName = "block_buffer";
         authors = [
           "RustCrypto Developers"
         ];
@@ -544,6 +551,7 @@ rec {
         version = "1.0.0";
         edition = "2018";
         sha256 = "1za0vb97n4brpzpv8lsbnzmq5r8f2b0cpqqr0sy8h5bn751xxwds";
+        libName = "cfg_if";
         authors = [
           "Alex Crichton <alex@alexcrichton.com>"
         ];
@@ -614,6 +622,7 @@ rec {
         version = "0.9.3";
         edition = "2015";
         sha256 = "0ii1ihpjb30fk38gdikm5wqlkmyr8k46fh4k2r8sagz5dng7ljhr";
+        libName = "core_foundation";
         authors = [
           "The Servo Project Developers"
         ];
@@ -641,6 +650,7 @@ rec {
         version = "0.8.4";
         edition = "2015";
         sha256 = "1yhf471qj6snnm2mcswai47vsbc9w30y4abmdp4crb4av87sb5p4";
+        libName = "core_foundation_sys";
         authors = [
           "The Servo Project Developers"
         ];
@@ -658,7 +668,7 @@ rec {
           {
             name = "libc";
             packageId = "libc";
-            target = { target, features }: (pkgs.rust.lib.toRustTarget stdenv.hostPlatform == "aarch64-linux-android");
+            target = { target, features }: (stdenv.hostPlatform.rust.rustcTarget == "aarch64-linux-android");
           }
           {
             name = "libc";
@@ -703,6 +713,7 @@ rec {
         version = "0.11.1";
         edition = "2018";
         sha256 = "05672ncc54h66vph42s0a42ljl69bwnqjh0x4xgj2v1395psildi";
+        libName = "crypto_mac";
         authors = [
           "RustCrypto Developers"
         ];
@@ -725,9 +736,10 @@ rec {
       };
       "data-encoding" = rec {
         crateName = "data-encoding";
-        version = "2.4.0";
+        version = "2.6.0";
         edition = "2018";
-        sha256 = "023k3dk8422jgbj7k72g63x51h1mhv91dhw1j4h205vzh6fnrrn2";
+        sha256 = "1qnn68n4vragxaxlkqcb1r28d3hhj43wch67lm4rpxlw89wnjmp8";
+        libName = "data_encoding";
         authors = [
           "Julien Cretin <git@ia0.eu>"
         ];
@@ -763,6 +775,7 @@ rec {
         version = "2.0.0";
         edition = "2018";
         sha256 = "1q9kr151h9681wwp6is18750ssghz6j9j7qm7qi1ngcwy7mzi35r";
+        libName = "dirs_next";
         authors = [
           "The @xdg-rs members"
         ];
@@ -783,6 +796,7 @@ rec {
         version = "0.1.2";
         edition = "2018";
         sha256 = "0kavhavdxv4phzj4l0psvh55hszwnr0rcz8sxbvx20pyqi2a3gaf";
+        libName = "dirs_sys_next";
         authors = [
           "The @xdg-rs members"
         ];
@@ -807,6 +821,13 @@ rec {
         ];
 
       };
+      "equivalent" = rec {
+        crateName = "equivalent";
+        version = "1.0.1";
+        edition = "2015";
+        sha256 = "1malmx5f4lkfvqasz319lq6gb3ddg19yzf9s8cykfsgzdmyq0hsl";
+
+      };
       "fnv" = rec {
         crateName = "fnv";
         version = "1.0.7";
@@ -887,6 +908,7 @@ rec {
         version = "0.3.30";
         edition = "2018";
         sha256 = "0y6b7xxqdjm9hlcjpakcg41qfl7lihf6gavk8fyqijsxhvbzgj7a";
+        libName = "futures_channel";
         dependencies = [
           {
             name = "futures-core";
@@ -914,6 +936,7 @@ rec {
         version = "0.3.30";
         edition = "2018";
         sha256 = "07aslayrn3lbggj54kci0ishmd1pr367fp7iks7adia1p05miinz";
+        libName = "futures_core";
         features = {
           "default" = [ "std" ];
           "portable-atomic" = [ "dep:portable-atomic" ];
@@ -926,6 +949,7 @@ rec {
         version = "0.3.30";
         edition = "2018";
         sha256 = "07dh08gs9vfll2h36kq32q9xd86xm6lyl9xikmmwlkqnmrrgqxm5";
+        libName = "futures_executor";
         dependencies = [
           {
             name = "futures-core";
@@ -956,6 +980,7 @@ rec {
         version = "0.3.30";
         edition = "2018";
         sha256 = "1hgh25isvsr4ybibywhr4dpys8mjnscw4wfxxwca70cn1gi26im4";
+        libName = "futures_io";
         features = {
           "default" = [ "std" ];
         };
@@ -967,6 +992,7 @@ rec {
         edition = "2018";
         sha256 = "1b49qh9d402y8nka4q6wvvj0c88qq91wbr192mdn5h54nzs0qxc7";
         procMacro = true;
+        libName = "futures_macro";
         dependencies = [
           {
             name = "proc-macro2";
@@ -989,6 +1015,7 @@ rec {
         version = "0.3.30";
         edition = "2018";
         sha256 = "1dag8xyyaya8n8mh8smx7x6w2dpmafg2din145v973a3hw7f1f4z";
+        libName = "futures_sink";
         features = {
           "default" = [ "std" ];
           "std" = [ "alloc" ];
@@ -1000,6 +1027,7 @@ rec {
         version = "0.3.30";
         edition = "2018";
         sha256 = "013h1724454hj8qczp8vvs10qfiqrxr937qsrv6rhii68ahlzn1q";
+        libName = "futures_task";
         features = {
           "default" = [ "std" ];
           "std" = [ "alloc" ];
@@ -1011,6 +1039,7 @@ rec {
         version = "0.3.30";
         edition = "2018";
         sha256 = "0j0xqhcir1zf2dcbpd421kgw6wvsk0rpxflylcysn1rlp3g02r1x";
+        libName = "futures_util";
         dependencies = [
           {
             name = "futures-channel";
@@ -1173,9 +1202,9 @@ rec {
       };
       "h2" = rec {
         crateName = "h2";
-        version = "0.3.21";
+        version = "0.3.26";
         edition = "2018";
-        sha256 = "0cq8g5bgk3fihnqicy3g8gc3dpsalzqjg4bjyip9g4my26m27z4i";
+        sha256 = "1s7msnfv7xprzs6xzfj5sg6p8bjcdpcqcmjjbkd345cyi1x55zl1";
         authors = [
           "Carl Lerche <me@carllerche.com>"
           "Sean McArthur <sean@seanmonstar.com>"
@@ -1225,7 +1254,7 @@ rec {
           {
             name = "tokio-util";
             packageId = "tokio-util";
-            features = [ "codec" ];
+            features = [ "codec" "io" ];
           }
           {
             name = "tracing";
@@ -1245,21 +1274,23 @@ rec {
       };
       "hashbrown" = rec {
         crateName = "hashbrown";
-        version = "0.12.3";
+        version = "0.14.5";
         edition = "2021";
-        sha256 = "1268ka4750pyg2pbgsr43f0289l5zah4arir2k4igx5a8c6fg7la";
+        sha256 = "1wa1vy1xs3mp11bn3z9dv0jricgr6a2j0zkf1g19yz3vw4il89z5";
         authors = [
           "Amanieu d'Antras <amanieu@gmail.com>"
         ];
         features = {
           "ahash" = [ "dep:ahash" ];
-          "ahash-compile-time-rng" = [ "ahash/compile-time-rng" ];
           "alloc" = [ "dep:alloc" ];
-          "bumpalo" = [ "dep:bumpalo" ];
+          "allocator-api2" = [ "dep:allocator-api2" ];
           "compiler_builtins" = [ "dep:compiler_builtins" ];
           "core" = [ "dep:core" ];
-          "default" = [ "ahash" "inline-more" ];
+          "default" = [ "ahash" "inline-more" "allocator-api2" ];
+          "equivalent" = [ "dep:equivalent" ];
+          "nightly" = [ "allocator-api2?/nightly" "bumpalo/allocator_api" ];
           "rayon" = [ "dep:rayon" ];
+          "rkyv" = [ "dep:rkyv" ];
           "rustc-dep-of-std" = [ "nightly" "core" "compiler_builtins" "alloc" "rustc-internal-api" ];
           "serde" = [ "dep:serde" ];
         };
@@ -1270,6 +1301,7 @@ rec {
         version = "0.3.3";
         edition = "2021";
         sha256 = "1dyc8qsjh876n74a3rcz8h43s27nj1sypdhsn2ms61bd3b47wzyp";
+        libName = "hermit_abi";
         authors = [
           "Stefan Lankes"
         ];
@@ -1356,6 +1388,7 @@ rec {
         version = "0.4.5";
         edition = "2018";
         sha256 = "1l967qwwlvhp198xdrnc0p5d7jwfcp6q2lm510j6zqw4s4b8zwym";
+        libName = "http_body";
         authors = [
           "Carl Lerche <me@carllerche.com>"
           "Lucio Franco <luciofranco14@gmail.com>"
@@ -1382,6 +1415,7 @@ rec {
         version = "1.1.3";
         edition = "2021";
         sha256 = "1vnald3g10gxj15dc5jjjk7aff23p1zly0xgzhn5gwfrb9k0nmkg";
+        libName = "http_serde";
         authors = [
           "Kornel <kornel@geekhood.net>"
         ];
@@ -1533,6 +1567,7 @@ rec {
         version = "0.23.2";
         edition = "2018";
         sha256 = "0736s6a32dqr107f943xaz1n05flbinq6l19lq1wsrxkc5g9d20p";
+        libName = "hyper_rustls";
         dependencies = [
           {
             name = "http";
@@ -1607,6 +1642,7 @@ rec {
         version = "0.1.58";
         edition = "2018";
         sha256 = "081vcr8z8ddhl5r1ywif6grnswk01b2ac4nks2bhn8zzdimvh9l3";
+        libName = "iana_time_zone";
         authors = [
           "Andrew Straw <strawman@astraw.com>"
           "Renรฉ Kijewski <rene.kijewski@fu-berlin.de>"
@@ -1652,6 +1688,7 @@ rec {
         version = "0.1.2";
         edition = "2018";
         sha256 = "17r6jmj31chn7xs9698r122mapq85mfnv98bb4pg6spm0si2f67k";
+        libName = "iana_time_zone_haiku";
         authors = [
           "Renรฉ Kijewski <crates.io@k6i.de>"
         ];
@@ -1665,32 +1702,32 @@ rec {
       };
       "indexmap" = rec {
         crateName = "indexmap";
-        version = "1.9.3";
+        version = "2.4.0";
         edition = "2021";
-        sha256 = "16dxmy7yvk51wvnih3a3im6fp5lmx0wx76i03n06wyak6cwhw1xx";
+        sha256 = "0p2hwvmir50qcl5q6lib8fjq5dzv4f0gqy8czcyfva3yzhzdbslk";
         dependencies = [
           {
+            name = "equivalent";
+            packageId = "equivalent";
+            usesDefaultFeatures = false;
+          }
+          {
             name = "hashbrown";
             packageId = "hashbrown";
             usesDefaultFeatures = false;
             features = [ "raw" ];
           }
         ];
-        buildDependencies = [
-          {
-            name = "autocfg";
-            packageId = "autocfg";
-          }
-        ];
         features = {
           "arbitrary" = [ "dep:arbitrary" ];
+          "borsh" = [ "dep:borsh" ];
+          "default" = [ "std" ];
           "quickcheck" = [ "dep:quickcheck" ];
           "rayon" = [ "dep:rayon" ];
           "rustc-rayon" = [ "dep:rustc-rayon" ];
           "serde" = [ "dep:serde" ];
-          "serde-1" = [ "serde" ];
         };
-        resolvedDefaultFeatures = [ "std" ];
+        resolvedDefaultFeatures = [ "default" "std" ];
       };
       "itoa" = rec {
         crateName = "itoa";
@@ -1726,6 +1763,7 @@ rec {
         version = "0.3.65";
         edition = "2018";
         sha256 = "1s1gaxgzpqfyygc7f2pwp9y128rh5f8zvsc4nm5yazgna9cw7h2l";
+        libName = "js_sys";
         authors = [
           "The wasm-bindgen Developers"
         ];
@@ -1979,6 +2017,7 @@ rec {
         version = "0.1.1";
         edition = "2021";
         sha256 = "0bpzcrwq89cc5q8mgkmsyx39vjsqaxvaxs29qp8k04rndc7ysfh0";
+        libName = "magic_buffer";
         authors = [
           "Sebastian Klose <mail@sklose.com>"
         ];
@@ -2092,9 +2131,9 @@ rec {
       };
       "mio" = rec {
         crateName = "mio";
-        version = "0.8.9";
+        version = "0.8.11";
         edition = "2018";
-        sha256 = "1l23hg513c23nhcdzvk25caaj28mic6qgqadbn8axgj6bqf2ikix";
+        sha256 = "034byyl0ardml5yliy1hmvx8arkmn9rv479pid794sm07ia519m4";
         authors = [
           "Carl Lerche <me@carllerche.com>"
           "Thomas de Zeeuw <thomasdezeeuw@gmail.com>"
@@ -2135,6 +2174,7 @@ rec {
         version = "0.46.0";
         edition = "2018";
         sha256 = "115sywxh53p190lyw97alm14nc004qj5jm5lvdj608z84rbida3p";
+        libName = "nu_ansi_term";
         authors = [
           "ogham@bsago.me"
           "Ryan Scheel (Havvy) <ryan.havvy@gmail.com>"
@@ -2163,6 +2203,7 @@ rec {
         version = "0.2.17";
         edition = "2018";
         sha256 = "0z16bi5zwgfysz6765v3rd6whfbjpihx3mhsn4dg8dzj2c221qrr";
+        libName = "num_traits";
         authors = [
           "The Rust Project Developers"
         ];
@@ -2254,6 +2295,7 @@ rec {
         version = "0.3.0";
         edition = "2018";
         sha256 = "1m8kzi4nd6shdqimn0mgb24f0hxslhnqd1whakyq06wcqd086jk2";
+        libName = "opaque_debug";
         authors = [
           "RustCrypto Developers"
         ];
@@ -2264,6 +2306,7 @@ rec {
         version = "0.1.5";
         edition = "2015";
         sha256 = "1kq18qm48rvkwgcggfkqq6pm948190czqc94d6bm2sir5hq1l0gz";
+        libName = "openssl_probe";
         authors = [
           "Alex Crichton <alex@alexcrichton.com>"
         ];
@@ -2351,6 +2394,7 @@ rec {
         version = "2.3.0";
         edition = "2018";
         sha256 = "152slflmparkh27hprw62sph8rv77wckzhwl2dhqk6bf563lfalv";
+        libName = "percent_encoding";
         authors = [
           "The rust-url developers"
         ];
@@ -2365,6 +2409,7 @@ rec {
         version = "1.1.3";
         edition = "2021";
         sha256 = "08k4cpy8q3j93qqgnrbzkcgpn7g0a88l4a9nm33kyghpdhffv97x";
+        libName = "pin_project";
         dependencies = [
           {
             name = "pin-project-internal";
@@ -2379,6 +2424,7 @@ rec {
         edition = "2021";
         sha256 = "01a4l3vb84brv9v7wl71chzxra2kynm6yvcjca66xv3ij6fgsna3";
         procMacro = true;
+        libName = "pin_project_internal";
         dependencies = [
           {
             name = "proc-macro2";
@@ -2401,6 +2447,7 @@ rec {
         version = "0.2.13";
         edition = "2018";
         sha256 = "0n0bwr5qxlf0mhn2xkl36sy55118s9qmvx2yl5f3ixkb007lbywa";
+        libName = "pin_project_lite";
 
       };
       "pin-utils" = rec {
@@ -2408,6 +2455,7 @@ rec {
         version = "0.1.0";
         edition = "2018";
         sha256 = "117ir7vslsl2z1a7qzhws4pd01cg2d3338c47swjyvqv2n60v1wb";
+        libName = "pin_utils";
         authors = [
           "Josef Brandl <mail@josefbrandl.de>"
         ];
@@ -2418,6 +2466,7 @@ rec {
         version = "1.0.69";
         edition = "2021";
         sha256 = "1nljgyllbm3yr3pa081bf83gxh6l4zvjqzaldw7v4mj9xfgihk0k";
+        libName = "proc_macro2";
         authors = [
           "David Tolnay <dtolnay@gmail.com>"
           "Alex Crichton <alex@alexcrichton.com>"
@@ -2976,6 +3025,7 @@ rec {
         version = "0.1.23";
         edition = "2015";
         sha256 = "0xnbk2bmyzshacjm2g1kd4zzv2y2az14bw3sjccq5qkpmsfvn9nn";
+        libName = "rustc_demangle";
         authors = [
           "Alex Crichton <alex@alexcrichton.com>"
         ];
@@ -3047,6 +3097,7 @@ rec {
         version = "0.6.3";
         edition = "2021";
         sha256 = "007zind70rd5rfsrkdcfm8vn09j8sg02phg9334kark6rdscxam9";
+        libName = "rustls_native_certs";
         dependencies = [
           {
             name = "openssl-probe";
@@ -3075,6 +3126,7 @@ rec {
         version = "1.0.4";
         edition = "2018";
         sha256 = "1324n5bcns0rnw6vywr5agff3rwfvzphi7rmbyzwnv6glkhclx0w";
+        libName = "rustls_pemfile";
         dependencies = [
           {
             name = "base64";
@@ -3157,6 +3209,7 @@ rec {
         version = "2.9.2";
         edition = "2021";
         sha256 = "1pplxk15s5yxvi2m1sz5xfmjibp96cscdcl432w9jzbk0frlzdh5";
+        libName = "security_framework";
         authors = [
           "Steven Fackler <sfackler@gmail.com>"
           "Kornel <kornel@geekhood.net>"
@@ -3203,6 +3256,7 @@ rec {
         version = "2.9.1";
         edition = "2021";
         sha256 = "0yhciwlsy9dh0ps1gw3197kvyqx1bvc4knrhiznhid6kax196cp9";
+        libName = "security_framework_sys";
         authors = [
           "Steven Fackler <sfackler@gmail.com>"
           "Kornel <kornel@geekhood.net>"
@@ -3415,6 +3469,7 @@ rec {
         version = "0.1.7";
         edition = "2018";
         sha256 = "1xipjr4nqsgw34k7a2cgj9zaasl2ds6jwn89886kww93d32a637l";
+        libName = "sharded_slab";
         authors = [
           "Eliza Weisman <eliza@buoyant.io>"
         ];
@@ -3430,12 +3485,16 @@ rec {
       };
       "shlex" = rec {
         crateName = "shlex";
-        version = "1.2.0";
+        version = "1.3.0";
         edition = "2015";
-        sha256 = "1033pj9dyb76nm5yv597nnvj3zpvr2aw9rm5wy0gah3dk99f1km7";
+        sha256 = "0r1y6bv26c1scpxvhg2cabimrmwgbp4p3wy6syj9n0c4s3q2znhg";
         authors = [
           "comex <comexk@gmail.com>"
           "Fenhl <fenhl@fenhl.net>"
+          "Adrian Taylor <adetaylor@chromium.org>"
+          "Alex Touchet <alextouchet@outlook.com>"
+          "Daniel Parks <dp+git@oxidized.org>"
+          "Garrett Berg <googberg@gmail.com>"
         ];
         features = {
           "default" = [ "std" ];
@@ -3447,6 +3506,7 @@ rec {
         version = "1.4.1";
         edition = "2015";
         sha256 = "18crkkw5k82bvcx088xlf5g4n3772m24qhzgfan80nda7d3rn8nq";
+        libName = "signal_hook_registry";
         authors = [
           "Michal 'vorner' Vaner <vorner@vorner.cz>"
           "Masaki Hara <ackie.h.gmai@gmail.com>"
@@ -3649,6 +3709,7 @@ rec {
         edition = "2021";
         sha256 = "1f0lmam4765sfnwr4b1n00y14vxh10g0311mkk0adr80pi02wsr6";
         procMacro = true;
+        libName = "thiserror_impl";
         authors = [
           "David Tolnay <dtolnay@gmail.com>"
         ];
@@ -3804,6 +3865,7 @@ rec {
         edition = "2021";
         sha256 = "0fwjy4vdx1h9pi4g2nml72wi0fr27b5m954p13ji9anyy8l1x2jv";
         procMacro = true;
+        libName = "tokio_macros";
         authors = [
           "Tokio Contributors <team@tokio.rs>"
         ];
@@ -3829,6 +3891,7 @@ rec {
         version = "0.23.4";
         edition = "2018";
         sha256 = "0nfsmmi8l1lgpbfy6079d5i13984djzcxrdr9jc06ghi0cwyhgn4";
+        libName = "tokio_rustls";
         authors = [
           "quininer kel <quininer@live.com>"
         ];
@@ -3867,6 +3930,7 @@ rec {
         version = "0.1.14";
         edition = "2021";
         sha256 = "0hi8hcwavh5sdi1ivc9qc4yvyr32f153c212dpd7sb366y6rhz1r";
+        libName = "tokio_stream";
         authors = [
           "Tokio Contributors <team@tokio.rs>"
         ];
@@ -3910,6 +3974,7 @@ rec {
         version = "0.7.10";
         edition = "2021";
         sha256 = "058y6x4mf0fsqji9rfyb77qbfyc50y4pk2spqgj6xsyr693z66al";
+        libName = "tokio_util";
         authors = [
           "Tokio Contributors <team@tokio.rs>"
         ];
@@ -3965,7 +4030,7 @@ rec {
           "time" = [ "tokio/time" "slab" ];
           "tracing" = [ "dep:tracing" ];
         };
-        resolvedDefaultFeatures = [ "codec" "default" "tracing" ];
+        resolvedDefaultFeatures = [ "codec" "default" "io" "tracing" ];
       };
       "tower" = rec {
         crateName = "tower";
@@ -4060,6 +4125,7 @@ rec {
         version = "0.3.2";
         edition = "2018";
         sha256 = "1l7i17k9vlssrdg4s3b0ia5jjkmmxsvv8s9y9ih0jfi8ssz8s362";
+        libName = "tower_layer";
         authors = [
           "Tower Maintainers <team@tower-rs.com>"
         ];
@@ -4070,6 +4136,7 @@ rec {
         version = "0.3.2";
         edition = "2018";
         sha256 = "0lmfzmmvid2yp2l36mbavhmqgsvzqf7r2wiwz73ml4xmwaf1rg5n";
+        libName = "tower_service";
         authors = [
           "Tower Maintainers <team@tower-rs.com>"
         ];
@@ -4128,6 +4195,7 @@ rec {
         edition = "2018";
         sha256 = "1rvb5dn9z6d0xdj14r403z0af0bbaqhg02hq4jc97g5wds6lqw1l";
         procMacro = true;
+        libName = "tracing_attributes";
         authors = [
           "Tokio Contributors <team@tokio.rs>"
           "Eliza Weisman <eliza@buoyant.io>"
@@ -4156,6 +4224,7 @@ rec {
         version = "0.1.32";
         edition = "2018";
         sha256 = "0m5aglin3cdwxpvbg6kz0r9r0k31j48n0kcfwsp6l49z26k3svf0";
+        libName = "tracing_core";
         authors = [
           "Tokio Contributors <team@tokio.rs>"
         ];
@@ -4186,6 +4255,7 @@ rec {
         version = "0.1.4";
         edition = "2018";
         sha256 = "1wmxawaz94sk52i4vs2wg5d5clyks972rqskrvc93rxl14ki2lgp";
+        libName = "tracing_log";
         authors = [
           "Tokio Contributors <team@tokio.rs>"
         ];
@@ -4218,6 +4288,7 @@ rec {
         version = "0.1.3";
         edition = "2018";
         sha256 = "1qfr0va69djvxqvjrx4vqq7p6myy414lx4w1f6amcn0hfwqj2sxw";
+        libName = "tracing_serde";
         authors = [
           "Tokio Contributors <team@tokio.rs>"
         ];
@@ -4242,6 +4313,7 @@ rec {
         version = "0.3.17";
         edition = "2018";
         sha256 = "0xvwfpmb943hdy4gzyn7a2azgigf30mfd1kx10gyh5gr6yy539ih";
+        libName = "tracing_subscriber";
         authors = [
           "Eliza Weisman <eliza@buoyant.io>"
           "David Barsky <me@davidbarsky.com>"
@@ -4336,6 +4408,7 @@ rec {
         version = "0.2.4";
         edition = "2015";
         sha256 = "1vc15paa4zi06ixsxihwbvfn24d708nsyg1ncgqwcrn42byyqa1m";
+        libName = "try_lock";
         authors = [
           "Sean McArthur <sean@seanmonstar.com>"
         ];
@@ -4352,12 +4425,7 @@ rec {
             requiredFeatures = [ ];
           }
         ];
-        # We can't filter paths with references in Nix 2.4
-        # See https://github.com/NixOS/nix/issues/5410
-        src =
-          if ((lib.versionOlder builtins.nixVersion "2.4pre20211007") || (lib.versionOlder "2.5" builtins.nixVersion))
-          then lib.cleanSourceWith { filter = sourceFilter; src = ./.; }
-          else ./.;
+        src = lib.cleanSourceWith { filter = sourceFilter; src = ./.; };
         dependencies = [
           {
             name = "aws_lambda_events";
@@ -4462,6 +4530,7 @@ rec {
         version = "1.0.12";
         edition = "2018";
         sha256 = "0jzf1znfpb2gx8nr8mvmyqs1crnv79l57nxnbiszc7xf7ynbjm1k";
+        libName = "unicode_ident";
         authors = [
           "David Tolnay <dtolnay@gmail.com>"
         ];
@@ -4549,6 +4618,7 @@ rec {
         version = "0.2.88";
         edition = "2018";
         sha256 = "1khgsh4z9bga35mjhg41dl7523i69ffc5m8ckhqaw6ssyabc5bkx";
+        libName = "wasm_bindgen";
         authors = [
           "The wasm-bindgen Developers"
         ];
@@ -4580,6 +4650,7 @@ rec {
         version = "0.2.88";
         edition = "2018";
         sha256 = "05zj8yl243rvs87rhicq2l1d6443lnm6k90khf744khf9ikg95z3";
+        libName = "wasm_bindgen_backend";
         authors = [
           "The wasm-bindgen Developers"
         ];
@@ -4625,6 +4696,7 @@ rec {
         edition = "2018";
         sha256 = "1chn3wgw9awmvs0fpmazbqyc5rwfgy3pj7lzwczmzb887dxh2qar";
         procMacro = true;
+        libName = "wasm_bindgen_macro";
         authors = [
           "The wasm-bindgen Developers"
         ];
@@ -4649,6 +4721,7 @@ rec {
         version = "0.2.88";
         edition = "2018";
         sha256 = "01rrzg3y1apqygsjz1jg0n7p831nm4kdyxmxyl85x7v6mf6kndf5";
+        libName = "wasm_bindgen_macro_support";
         authors = [
           "The wasm-bindgen Developers"
         ];
@@ -4687,6 +4760,7 @@ rec {
         edition = "2018";
         links = "wasm_bindgen";
         sha256 = "02vmw2rzsla1qm0zgfng4kqz52xn8k54v8ads4g1macv09fnq10d";
+        libName = "wasm_bindgen_shared";
         authors = [
           "The wasm-bindgen Developers"
         ];
@@ -4697,6 +4771,7 @@ rec {
         version = "0.3.65";
         edition = "2018";
         sha256 = "11ba406ca9qssc21c37v49sn2y2gsdn6c3nva4hjf8v3yv2rkd2x";
+        libName = "web_sys";
         authors = [
           "The wasm-bindgen Developers"
         ];
@@ -5198,12 +5273,12 @@ rec {
           {
             name = "winapi-i686-pc-windows-gnu";
             packageId = "winapi-i686-pc-windows-gnu";
-            target = { target, features }: (pkgs.rust.lib.toRustTarget stdenv.hostPlatform == "i686-pc-windows-gnu");
+            target = { target, features }: (stdenv.hostPlatform.rust.rustcTarget == "i686-pc-windows-gnu");
           }
           {
             name = "winapi-x86_64-pc-windows-gnu";
             packageId = "winapi-x86_64-pc-windows-gnu";
-            target = { target, features }: (pkgs.rust.lib.toRustTarget stdenv.hostPlatform == "x86_64-pc-windows-gnu");
+            target = { target, features }: (stdenv.hostPlatform.rust.rustcTarget == "x86_64-pc-windows-gnu");
           }
         ];
         features = {
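
All target checks move from the removed pkgs.rust.lib.toRustTarget helper to the rust.rustcTarget attribute that newer nixpkgs exposes on every platform; both spell the same target triple. For example, in a nix repl (the value shown assumes an x86_64 Linux host):

  nix-repl> pkgs.stdenv.hostPlatform.rust.rustcTarget
  "x86_64-unknown-linux-gnu"
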
@@ -5216,6 +5291,7 @@ rec {
         version = "0.4.0";
         edition = "2015";
         sha256 = "1dmpa6mvcvzz16zg6d5vrfy4bxgg541wxrcip7cnshi06v38ffxc";
+        libName = "winapi_i686_pc_windows_gnu";
         authors = [
           "Peter Atashian <retep998@gmail.com>"
         ];
@@ -5226,6 +5302,7 @@ rec {
         version = "0.4.0";
         edition = "2015";
         sha256 = "0gqq64czqb64kskjryj8isp62m2sgvx25yyj3kpc2myh85w24bki";
+        libName = "winapi_x86_64_pc_windows_gnu";
         authors = [
           "Peter Atashian <retep998@gmail.com>"
         ];
@@ -5236,6 +5313,7 @@ rec {
         version = "0.51.1";
         edition = "2021";
         sha256 = "0r1f57hsshsghjyc7ypp2s0i78f7b1vr93w68sdb8baxyf2czy7i";
+        libName = "windows_core";
         authors = [
           "Microsoft"
         ];
@@ -5253,6 +5331,7 @@ rec {
         version = "0.48.0";
         edition = "2018";
         sha256 = "1aan23v5gs7gya1lc46hqn9mdh8yph3fhxmhxlw36pn6pqc28zb7";
+        libName = "windows_sys";
         authors = [
           "Microsoft"
         ];
@@ -5546,6 +5625,7 @@ rec {
         version = "0.48.5";
         edition = "2018";
         sha256 = "034ljxqshifs1lan89xwpcy1hp0lhdh4b5n0d2z4fwjx2piacbws";
+        libName = "windows_targets";
         authors = [
           "Microsoft"
         ];
@@ -5553,7 +5633,7 @@ rec {
           {
             name = "windows_aarch64_gnullvm";
             packageId = "windows_aarch64_gnullvm";
-            target = { target, features }: (pkgs.rust.lib.toRustTarget stdenv.hostPlatform == "aarch64-pc-windows-gnullvm");
+            target = { target, features }: (stdenv.hostPlatform.rust.rustcTarget == "aarch64-pc-windows-gnullvm");
           }
           {
             name = "windows_aarch64_msvc";
@@ -5578,7 +5658,7 @@ rec {
           {
             name = "windows_x86_64_gnullvm";
             packageId = "windows_x86_64_gnullvm";
-            target = { target, features }: (pkgs.rust.lib.toRustTarget stdenv.hostPlatform == "x86_64-pc-windows-gnullvm");
+            target = { target, features }: (stdenv.hostPlatform.rust.rustcTarget == "x86_64-pc-windows-gnullvm");
           }
           {
             name = "windows_x86_64_msvc";
@@ -5722,6 +5802,7 @@ rec {
         version = "4.1.3+zstd.1.5.1";
         edition = "2018";
         sha256 = "0yfvqzzkbj871f2vaikal5rm2gf60p1mdzp3jk3w5hmkkywq37g9";
+        libName = "zstd_safe";
         authors = [
           "Alexandre Bury <alexandre.bury@gmail.com>"
         ];
@@ -5756,6 +5837,7 @@ rec {
         edition = "2018";
         links = "zstd";
         sha256 = "17xcr0mw8ps9hlc8m0dzj7yd52lb9r9ic9fbpxa4994yilj2zbrd";
+        libName = "zstd_sys";
         authors = [
           "Alexandre Bury <alexandre.bury@gmail.com>"
         ];
@@ -5794,14 +5876,11 @@ rec {
       fuchsia = true;
       test = false;
 
-      /* We are choosing an arbitrary rust version to grab `lib` from,
-      which is unfortunate, but `lib` has been version-agnostic the
-      whole time so this is good enough for now.
-      */
-      os = pkgs.rust.lib.toTargetOs platform;
-      arch = pkgs.rust.lib.toTargetArch platform;
-      family = pkgs.rust.lib.toTargetFamily platform;
-      vendor = pkgs.rust.lib.toTargetVendor platform;
+      inherit (platform.rust.platform)
+        arch
+        os
+        vendor;
+      family = platform.rust.platform.target-family;
       env = "gnu";
       endian =
         if platform.parsed.cpu.significantByte.name == "littleEndian"
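
Rather than borrowing `lib` from an arbitrary Rust package, the cfg attributes are now read off the platform itself, which newer nixpkgs populates under rust.platform. A sketch of the shape being inherited (abridged; values are examples for an x86_64 Linux host):

  # platform.rust.platform, abridged:
  {
    arch = "x86_64";
    os = "linux";
    target-family = [ "unix" ];
    vendor = "unknown";
  }
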
@@ -5878,6 +5957,7 @@ rec {
               (
                 _: {
                   buildTests = true;
+                  release = false;
                 }
               );
             # If the user hasn't set any pre/post commands, we don't want to
@@ -5890,51 +5970,41 @@ rec {
                 testPostRun
               ]);
           in
-          pkgs.runCommand "run-tests-${testCrate.name}"
-            {
-              inherit testCrateFlags;
-              buildInputs = testInputs;
-            } ''
-            set -e
+          pkgs.stdenvNoCC.mkDerivation {
+            name = "run-tests-${testCrate.name}";
 
-            export RUST_BACKTRACE=1
+            inherit (crate) src;
 
-            # recreate a file hierarchy as when running tests with cargo
+            inherit testCrateFlags;
 
-            # the source for test data
-            # It's necessary to locate the source in $NIX_BUILD_TOP/source/
-            # instead of $NIX_BUILD_TOP/
-            # because we compiled those test binaries in the former and not the latter.
-            # So all paths will expect source tree to be there and not in the build top directly.
-            # For example: $NIX_BUILD_TOP := /build in general, if you ask yourself.
-            # TODO(raitobezarius): I believe there could be more edge cases if `crate.sourceRoot`
-            # do exist but it's very hard to reason about them, so let's wait until the first bug report.
-            mkdir -p source/
-            cd source/
+            buildInputs = testInputs;
 
-            ${pkgs.buildPackages.xorg.lndir}/bin/lndir ${crate.src}
+            buildPhase = ''
+              set -e
+              export RUST_BACKTRACE=1
 
-            # build outputs
-            testRoot=target/debug
-            mkdir -p $testRoot
+              # build outputs
+              testRoot=target/debug
+              mkdir -p $testRoot
 
-            # executables of the crate
-            # we copy to prevent std::env::current_exe() to resolve to a store location
-            for i in ${crate}/bin/*; do
-              cp "$i" "$testRoot"
-            done
-            chmod +w -R .
+              # executables of the crate
+              # we copy to prevent std::env::current_exe() from resolving to a store location
+              for i in ${crate}/bin/*; do
+                cp "$i" "$testRoot"
+              done
+              chmod +w -R .
 
-            # test harness executables are suffixed with a hash, like cargo does
-            # this allows to prevent name collision with the main
-            # executables of the crate
-            hash=$(basename $out)
-            for file in ${drv}/tests/*; do
-              f=$testRoot/$(basename $file)-$hash
-              cp $file $f
-              ${testCommand}
-            done
-          '';
+              # test harness executables are suffixed with a hash, as cargo does;
+              # this prevents name collisions with the main
+              # executables of the crate
+              hash=$(basename $out)
+              for file in ${drv}/tests/*; do
+                f=$testRoot/$(basename $file)-$hash
+                cp $file $f
+                ${testCommand}
+              done
+            '';
+          };
       in
       pkgs.runCommand "${crate.name}-linked"
         {
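
The test runner changes in two ways: test crates are now built with release = false, so the binaries land in target/debug where the script expects them, and the ad-hoc pkgs.runCommand plus lndir source re-creation gives way to a stdenvNoCC.mkDerivation whose ordinary unpack phase provides the source tree. Abridged, the new shape is (names as in the hunk above):

  pkgs.stdenvNoCC.mkDerivation {
    name = "run-tests-${testCrate.name}";
    inherit (crate) src;   # stdenv unpacks this; no manual lndir needed
    inherit testCrateFlags;
    buildInputs = testInputs;
    buildPhase = ''
      set -e
      export RUST_BACKTRACE=1
      # ...copy the crate's binaries into target/debug and run each
      # hash-suffixed test harness, as in the hunk above
    '';
  }
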
@@ -6043,7 +6113,7 @@ rec {
             let
               self = {
                 crates = lib.mapAttrs (packageId: value: buildByPackageIdForPkgsImpl self pkgs packageId) crateConfigs;
-                target = makeTarget pkgs.stdenv.hostPlatform;
+                target = makeTarget stdenv.hostPlatform;
                 build = mkBuiltByPackageIdByPkgs pkgs.buildPackages;
               };
             in
@@ -6118,8 +6188,6 @@ rec {
             buildRustCrateForPkgsFunc pkgs
               (
                 crateConfig // {
-                  # https://github.com/NixOS/nixpkgs/issues/218712
-                  dontStrip = stdenv.hostPlatform.isDarwin;
                   src = crateConfig.src or (
                     pkgs.fetchurl rec {
                       name = "${crateConfig.crateName}-${crateConfig.version}.tar.gz";
diff --git a/tvix/tools/turbofetch/Cargo.toml b/tvix/tools/turbofetch/Cargo.toml
index 6b2f0e752020..65a7be9a55b0 100644
--- a/tvix/tools/turbofetch/Cargo.toml
+++ b/tvix/tools/turbofetch/Cargo.toml
@@ -10,7 +10,7 @@ members = ["."]
 [dependencies]
 aws_lambda_events = { version = "0.11.1", default-features = false, features = ["lambda_function_urls"] }
 bytes = "1.5.0"
-data-encoding = "2.4.0"
+data-encoding = "2.6.0"
 futures = { version = "0.3.30", default-features = false, features = ["std"] }
 httparse = "1.8.0"
 hyper = { version = "0.14.27", default-features = false }
diff --git a/tvix/tools/turbofetch/default.nix b/tvix/tools/turbofetch/default.nix
index bd70f6a5e694..a98b2f0b4330 100644
--- a/tvix/tools/turbofetch/default.nix
+++ b/tvix/tools/turbofetch/default.nix
@@ -1,12 +1,11 @@
-{ lib, pkgs, ... }:
+{ pkgs, depot, ... }:
 
 (pkgs.callPackage ./Cargo.nix {
-  defaultCrateOverrides = pkgs.defaultCrateOverrides // {
-
-    ring = prev: {
-      links = ''ring_core_${lib.replaceStrings ["."] ["_"] prev.version}'';
+  defaultCrateOverrides = (depot.tvix.utils.defaultCrateOverridesForPkgs pkgs) // {
+    turbofetch = prev: {
+      src = depot.tvix.utils.filterRustCrateSrc { root = prev.src.origSrc; };
     };
   };
-}).rootCrate.build.override {
-  runTests = true;
+}).rootCrate.build.overrideAttrs {
+  meta.ci.extraSteps.crate2nix-check = depot.tvix.utils.mkCrate2nixCheck ./Cargo.nix;
 }
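
turbofetch now reuses the depot-wide helpers instead of a hand-rolled ring override: defaultCrateOverridesForPkgs supplies the shared crate overrides, filterRustCrateSrc trims the crate source, and the runTests override is traded for a CI step verifying that Cargo.nix is up to date. The same pattern should carry over to any depot Rust tool; a hedged sketch, with `mytool` as a hypothetical crate name:

  (pkgs.callPackage ./Cargo.nix {
    defaultCrateOverrides = (depot.tvix.utils.defaultCrateOverridesForPkgs pkgs) // {
      mytool = prev: {
        src = depot.tvix.utils.filterRustCrateSrc { root = prev.src.origSrc; };
      };
    };
  }).rootCrate.build.overrideAttrs {
    meta.ci.extraSteps.crate2nix-check = depot.tvix.utils.mkCrate2nixCheck ./Cargo.nix;
  }
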
diff --git a/tvix/tools/weave/Cargo.lock b/tvix/tools/weave/Cargo.lock
index 46a90cb4822f..bb571af1a20d 100644
--- a/tvix/tools/weave/Cargo.lock
+++ b/tvix/tools/weave/Cargo.lock
@@ -271,9 +271,9 @@ dependencies = [
 
 [[package]]
 name = "bytes"
-version = "1.5.0"
+version = "1.6.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a2bd12c1caf447e69cd4528f47f94d203fd2582878ecb9e9465484c4148a8223"
+checksum = "a12916984aab3fa6e39d655a33e09c0071eb36d6ab3aea5c2d78551f1df6d952"
 
 [[package]]
 name = "cc"
@@ -422,16 +422,15 @@ dependencies = [
 
 [[package]]
 name = "curve25519-dalek"
-version = "4.1.2"
+version = "4.1.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0a677b8922c94e01bdbb12126b0bc852f00447528dee1782229af9c720c3f348"
+checksum = "97fb8b7c4503de7d6ae7b42ab72a5a59857b4c937ec27a3d4539dba95b5ab2be"
 dependencies = [
  "cfg-if",
  "cpufeatures",
  "curve25519-dalek-derive",
  "digest",
  "fiat-crypto",
- "platforms",
  "rustc_version",
  "subtle",
  "zeroize",
@@ -450,9 +449,9 @@ dependencies = [
 
 [[package]]
 name = "data-encoding"
-version = "2.5.0"
+version = "2.6.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7e962a19be5cfc3f3bf6dd8f61eb50107f356ad6270fbb3ed41476571db78be5"
+checksum = "e8566979429cf69b49a5c740c60791108e86440e8be149bbea4fe54d2c32d6e2"
 
 [[package]]
 name = "der"
@@ -511,6 +510,17 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "a26ae43d7bcc3b814de94796a5e736d4029efb0ee900c12e2d54c993ad1a1e07"
 
 [[package]]
+name = "enum-primitive-derive"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ba7795da175654fe16979af73f81f26a8ea27638d8d9823d317016888a63dc4c"
+dependencies = [
+ "num-traits",
+ "quote",
+ "syn 2.0.48",
+]
+
+[[package]]
 name = "enum_dispatch"
 version = "0.3.12"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -794,6 +804,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "4ec2a862134d2a7d32d7983ddcdd1c4923530833c9f2ea1a44fc5fa473989058"
 
 [[package]]
+name = "libmimalloc-sys"
+version = "0.1.39"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "23aa6811d3bd4deb8a84dde645f943476d13b248d818edcf8ce0b2f37f036b44"
+dependencies = [
+ "cc",
+ "libc",
+]
+
+[[package]]
 name = "lock_api"
 version = "0.4.11"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -845,6 +865,15 @@ dependencies = [
 ]
 
 [[package]]
+name = "mimalloc"
+version = "0.1.43"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "68914350ae34959d83f732418d51e2427a794055d0b9529f48259ac07af65633"
+dependencies = [
+ "libmimalloc-sys",
+]
+
+[[package]]
 name = "minimal-lexical"
 version = "0.2.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -861,9 +890,9 @@ dependencies = [
 
 [[package]]
 name = "mio"
-version = "0.8.10"
+version = "0.8.11"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8f3d0b296e374a4e6f3c7b0a1f5a51d748a0d34c85e7dc48fc3fa9a87657fe09"
+checksum = "a4a650543ca06a924e8b371db273b2756685faae30f8487da1b56505a8f78b0c"
 dependencies = [
  "libc",
  "wasi",
@@ -898,15 +927,22 @@ version = "0.1.0"
 dependencies = [
  "bitflags 2.4.2",
  "bstr",
+ "bytes",
  "data-encoding",
  "ed25519",
  "ed25519-dalek",
+ "enum-primitive-derive",
  "glob",
+ "mimalloc",
  "nom",
+ "num-traits",
+ "pin-project-lite",
  "serde",
  "serde_json",
  "sha2",
  "thiserror",
+ "tokio",
+ "tracing",
 ]
 
 [[package]]
@@ -1058,12 +1094,6 @@ dependencies = [
 ]
 
 [[package]]
-name = "platforms"
-version = "3.3.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "626dec3cac7cc0e1577a2ec3fc496277ec2baa084bebad95bb6fdbfae235f84c"
-
-[[package]]
 name = "polars"
 version = "0.36.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -1849,6 +1879,37 @@ dependencies = [
 ]
 
 [[package]]
+name = "tracing"
+version = "0.1.40"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef"
+dependencies = [
+ "pin-project-lite",
+ "tracing-attributes",
+ "tracing-core",
+]
+
+[[package]]
+name = "tracing-attributes"
+version = "0.1.27"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.48",
+]
+
+[[package]]
+name = "tracing-core"
+version = "0.1.32"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54"
+dependencies = [
+ "once_cell",
+]
+
+[[package]]
 name = "typenum"
 version = "1.17.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
diff --git a/tvix/tools/weave/Cargo.nix b/tvix/tools/weave/Cargo.nix
index 650b655a2df0..25b2c94a3f11 100644
--- a/tvix/tools/weave/Cargo.nix
+++ b/tvix/tools/weave/Cargo.nix
@@ -1,4 +1,4 @@
-# This file was @generated by crate2nix 0.13.0 with the command:
+# This file was @generated by crate2nix 0.14.1 with the command:
 #   "generate" "--all-features"
 # See https://github.com/kolloch/crate2nix for more info.
 
@@ -13,6 +13,8 @@
 , rootFeatures ? [ "default" ]
   # If true, throw errors instead of issuing deprecation warnings.
 , strictDeprecation ? false
+  # Elements to add to the `-C target-feature=` argument passed to `rustc`
+  # (separated by `,`, prefixed with `+`).
   # Used for conditional compilation based on CPU feature detection.
 , targetFeatures ? [ ]
   # Whether to perform release builds: longer compile times, faster binaries.
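
The newly documented targetFeatures argument feeds rustc's -C target-feature= flag. A usage sketch, following the comment's convention of +-prefixed, comma-joined elements (the feature names are examples, and pkgs is assumed to be in scope):

  import ./Cargo.nix {
    inherit pkgs;
    targetFeatures = [ "+avx2" "+fma" ];  # becomes -C target-feature=+avx2,+fma
  }
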
@@ -205,6 +207,7 @@ rec {
         edition = "2015";
         crateBin = [ ];
         sha256 = "1cy6r2sfv5y5cigv86vms7n5nlwhx1rbyxwcraqnmm1rxiib2yyc";
+        libName = "alloc_no_stdlib";
         authors = [
           "Daniel Reiter Horn <danielrh@dropbox.com>"
         ];
@@ -216,6 +219,7 @@ rec {
         edition = "2015";
         crateBin = [ ];
         sha256 = "1kkfbld20ab4165p29v172h8g0wvq8i06z8vnng14whw0isq5ywl";
+        libName = "alloc_stdlib";
         authors = [
           "Daniel Reiter Horn <danielrh@dropbox.com>"
         ];
@@ -232,6 +236,7 @@ rec {
         version = "0.2.16";
         edition = "2018";
         sha256 = "1iayppgq4wqbfbfcqmsbwgamj0s65012sskfvyx07pxavk3gyhh9";
+        libName = "allocator_api2";
         authors = [
           "Zakarum <zaq.dev@icloud.com>"
         ];
@@ -247,6 +252,7 @@ rec {
         version = "0.1.1";
         edition = "2018";
         sha256 = "1w7ynjxrfs97xg3qlcdns4kgfpwcdv824g611fq32cag4cdr96g9";
+        libName = "android_tzdata";
         authors = [
           "RumovZ"
         ];
@@ -318,6 +324,7 @@ rec {
         version = "0.2.0";
         edition = "2021";
         sha256 = "0xpbqf7qkvzplpjd7f0wbcf2n1v9vygdccwxkd1amxp4il0hlzdz";
+        libName = "array_init_cursor";
 
       };
       "arrow-format" = rec {
@@ -325,6 +332,7 @@ rec {
         version = "0.8.1";
         edition = "2018";
         sha256 = "1irj67p6c224dzw86jr7j3z9r5zfid52gy6ml8rdqk4r2si4x207";
+        libName = "arrow_format";
         authors = [
           "Jorge C. Leitao <jorgecarleitao@gmail.com>"
         ];
@@ -360,6 +368,7 @@ rec {
         version = "0.3.5";
         edition = "2018";
         sha256 = "0l8sjq1rylkb1ak0pdyjn83b3k6x36j22myngl4sqqgg7whdsmnd";
+        libName = "async_stream";
         authors = [
           "Carl Lerche <me@carllerche.com>"
         ];
@@ -385,6 +394,7 @@ rec {
         edition = "2018";
         sha256 = "14q179j4y8p2z1d0ic6aqgy9fhwz8p9cai1ia8kpw4bw7q12mrhn";
         procMacro = true;
+        libName = "async_stream_impl";
         authors = [
           "Carl Lerche <me@carllerche.com>"
         ];
@@ -411,6 +421,7 @@ rec {
         edition = "2021";
         sha256 = "1adf1jh2yg39rkpmqjqyr9xyd6849p0d95425i6imgbhx0syx069";
         procMacro = true;
+        libName = "async_trait";
         authors = [
           "David Tolnay <dtolnay@gmail.com>"
         ];
@@ -602,6 +613,7 @@ rec {
         version = "0.10.4";
         edition = "2018";
         sha256 = "0w9sa2ypmrsqqvc20nhwr75wbb5cjr4kkyhpjm1z1lv2kdicfy1h";
+        libName = "block_buffer";
         authors = [
           "RustCrypto Developers"
         ];
@@ -657,6 +669,7 @@ rec {
         edition = "2015";
         crateBin = [ ];
         sha256 = "0kyyh9701dwqzwvn2frff4ww0zibikqd1s1xvl7n1pfpc3z4lbjf";
+        libName = "brotli_decompressor";
         authors = [
           "Daniel Reiter Horn <danielrh@dropbox.com>"
           "The Brotli Authors"
@@ -779,9 +792,9 @@ rec {
       };
       "bytes" = rec {
         crateName = "bytes";
-        version = "1.5.0";
+        version = "1.6.1";
         edition = "2018";
-        sha256 = "08w2i8ac912l8vlvkv3q51cd4gr09pwlg3sjsjffcizlrb0i5gd2";
+        sha256 = "0lnryqfiymbq5mfflfmbsqvfnw80kkh36nk5kpiscgxb9ac1cad1";
         authors = [
           "Carl Lerche <me@carllerche.com>"
           "Sean McArthur <sean@seanmonstar.com>"
@@ -825,6 +838,7 @@ rec {
         version = "1.0.0";
         edition = "2018";
         sha256 = "1za0vb97n4brpzpv8lsbnzmq5r8f2b0cpqqr0sy8h5bn751xxwds";
+        libName = "cfg_if";
         authors = [
           "Alex Crichton <alex@alexcrichton.com>"
         ];
@@ -895,6 +909,7 @@ rec {
         version = "7.1.0";
         edition = "2021";
         sha256 = "11i6sm6vznv9982hqpbrba43vfd7vv7zqzlywdc4qykvdhyh8r3w";
+        libName = "comfy_table";
         authors = [
           "Arne Beer <contact@arne.beer>"
         ];
@@ -942,6 +957,7 @@ rec {
         version = "0.9.6";
         edition = "2021";
         sha256 = "1y0jnqaq7p2wvspnx7qj76m7hjcqpz73qzvr9l2p9n2s51vr6if2";
+        libName = "const_oid";
         authors = [
           "RustCrypto Developers"
         ];
@@ -954,6 +970,7 @@ rec {
         version = "0.8.6";
         edition = "2018";
         sha256 = "13w6sdf06r0hn7bx2b45zxsg1mm2phz34jikm6xc5qrbr6djpsh6";
+        libName = "core_foundation_sys";
         authors = [
           "The Servo Project Developers"
         ];
@@ -974,7 +991,7 @@ rec {
           {
             name = "libc";
             packageId = "libc";
-            target = { target, features }: (pkgs.rust.lib.toRustTarget stdenv.hostPlatform == "aarch64-linux-android");
+            target = { target, features }: (stdenv.hostPlatform.rust.rustcTarget == "aarch64-linux-android");
           }
           {
             name = "libc";
@@ -1019,6 +1036,7 @@ rec {
         version = "0.5.11";
         edition = "2021";
         sha256 = "16v48qdflpw3hgdik70bhsj7hympna79q7ci47rw0mlgnxsw2v8p";
+        libName = "crossbeam_channel";
         dependencies = [
           {
             name = "crossbeam-utils";
@@ -1037,6 +1055,7 @@ rec {
         version = "0.8.5";
         edition = "2021";
         sha256 = "03bp38ljx4wj6vvy4fbhx41q8f585zyqix6pncz1mkz93z08qgv1";
+        libName = "crossbeam_deque";
         dependencies = [
           {
             name = "crossbeam-epoch";
@@ -1060,6 +1079,7 @@ rec {
         version = "0.9.18";
         edition = "2021";
         sha256 = "03j2np8llwf376m3fxqx859mgp9f83hj1w34153c7a9c7i5ar0jv";
+        libName = "crossbeam_epoch";
         dependencies = [
           {
             name = "crossbeam-utils";
@@ -1081,6 +1101,7 @@ rec {
         version = "0.3.11";
         edition = "2021";
         sha256 = "0d8y8y3z48r9javzj67v3p2yfswd278myz1j9vzc4sp7snslc0yz";
+        libName = "crossbeam_queue";
         dependencies = [
           {
             name = "crossbeam-utils";
@@ -1100,6 +1121,7 @@ rec {
         version = "0.8.19";
         edition = "2021";
         sha256 = "0iakrb1b8fjqrag7wphl94d10irhbh2fw1g444xslsywqyn3p3i4";
+        libName = "crossbeam_utils";
         features = {
           "default" = [ "std" ];
           "loom" = [ "dep:loom" ];
@@ -1176,6 +1198,7 @@ rec {
         version = "0.1.6";
         edition = "2018";
         sha256 = "1cvby95a6xg7kxdz5ln3rl9xh66nz66w46mm3g56ri1z5x815yqv";
+        libName = "crypto_common";
         authors = [
           "RustCrypto Developers"
         ];
@@ -1198,9 +1221,10 @@ rec {
       };
       "curve25519-dalek" = rec {
         crateName = "curve25519-dalek";
-        version = "4.1.2";
+        version = "4.1.3";
         edition = "2021";
-        sha256 = "0j7kqchcgycs4a11gvlda93h9w2jr05nn4hjpfyh2kn94a4pnrqa";
+        sha256 = "1gmjb9dsknrr8lypmhkyjd67p1arb8mbfamlwxm7vph38my8pywp";
+        libName = "curve25519_dalek";
         authors = [
           "Isis Lovecruft <isis@patternsinthevoid.net>"
           "Henry de Valence <hdevalence@hdevalence.ca>"
@@ -1246,10 +1270,6 @@ rec {
         ];
         buildDependencies = [
           {
-            name = "platforms";
-            packageId = "platforms";
-          }
-          {
             name = "rustc_version";
             packageId = "rustc_version";
           }
@@ -1273,6 +1293,7 @@ rec {
         edition = "2021";
         sha256 = "1cry71xxrr0mcy5my3fb502cwfxy6822k4pm19cwrilrg7hq4s7l";
         procMacro = true;
+        libName = "curve25519_dalek_derive";
         dependencies = [
           {
             name = "proc-macro2";
@@ -1292,9 +1313,10 @@ rec {
       };
       "data-encoding" = rec {
         crateName = "data-encoding";
-        version = "2.5.0";
+        version = "2.6.0";
         edition = "2018";
-        sha256 = "1rcbnwfmfxhlshzbn3r7srm3azqha3mn33yxyqxkzz2wpqcjm5ky";
+        sha256 = "1qnn68n4vragxaxlkqcb1r28d3hhj43wch67lm4rpxlw89wnjmp8";
+        libName = "data_encoding";
         authors = [
           "Julien Cretin <git@ia0.eu>"
         ];
@@ -1378,6 +1400,7 @@ rec {
         version = "1.0.16";
         edition = "2018";
         sha256 = "0pa9kas6a241pbx0q82ipwi4f7m7wwyzkkc725caky24gl4j4nsl";
+        libName = "dyn_clone";
         authors = [
           "David Tolnay <dtolnay@gmail.com>"
         ];
@@ -1420,6 +1443,7 @@ rec {
         version = "2.1.1";
         edition = "2021";
         sha256 = "0w88cafwglg9hjizldbmlza0ns3hls81zk1bcih3m5m3h67algaa";
+        libName = "ed25519_dalek";
         authors = [
           "isis lovecruft <isis@patternsinthevoid.net>"
           "Tony Arcieri <bascule@gmail.com>"
@@ -1506,6 +1530,33 @@ rec {
         };
         resolvedDefaultFeatures = [ "default" "use_std" ];
       };
+      "enum-primitive-derive" = rec {
+        crateName = "enum-primitive-derive";
+        version = "0.3.0";
+        edition = "2018";
+        sha256 = "0k6wcf58h5kh64yq5nfq71va53kaya0kzxwsjwbgwm2n2zd9axxs";
+        procMacro = true;
+        libName = "enum_primitive_derive";
+        authors = [
+          "Doug Goldstein <cardoe@cardoe.com>"
+        ];
+        dependencies = [
+          {
+            name = "num-traits";
+            packageId = "num-traits";
+            usesDefaultFeatures = false;
+          }
+          {
+            name = "quote";
+            packageId = "quote";
+          }
+          {
+            name = "syn";
+            packageId = "syn 2.0.48";
+          }
+        ];
+
+      };
       "enum_dispatch" = rec {
         crateName = "enum_dispatch";
         version = "0.3.12";
@@ -1562,6 +1613,7 @@ rec {
         version = "0.1.9";
         edition = "2015";
         sha256 = "0nj6j26p71bjy8h42x6jahx1hn0ng6mc2miwpgwnp8vnwqf4jq3k";
+        libName = "fallible_streaming_iterator";
         authors = [
           "Steven Fackler <sfackler@gmail.com>"
         ];
@@ -1572,6 +1624,7 @@ rec {
         version = "0.2.0";
         edition = "2018";
         sha256 = "0g7kfll3xyh99kc7r352lhljnwvgayxxa6saifb6725inikmyxlm";
+        libName = "fast_float";
         authors = [
           "Ivan Smirnov <i.s.smirnov@gmail.com>"
         ];
@@ -1585,6 +1638,7 @@ rec {
         version = "0.2.6";
         edition = "2018";
         sha256 = "10hkkkjynhibvchznkxx81gwxqarn9i5sgz40d6xxb8xzhsz8xhn";
+        libName = "fiat_crypto";
         authors = [
           "Fiat Crypto library authors <jgross@mit.edu>"
         ];
@@ -1714,6 +1768,7 @@ rec {
         version = "0.3.30";
         edition = "2018";
         sha256 = "0y6b7xxqdjm9hlcjpakcg41qfl7lihf6gavk8fyqijsxhvbzgj7a";
+        libName = "futures_channel";
         dependencies = [
           {
             name = "futures-core";
@@ -1741,6 +1796,7 @@ rec {
         version = "0.3.30";
         edition = "2018";
         sha256 = "07aslayrn3lbggj54kci0ishmd1pr367fp7iks7adia1p05miinz";
+        libName = "futures_core";
         features = {
           "default" = [ "std" ];
           "portable-atomic" = [ "dep:portable-atomic" ];
@@ -1753,6 +1809,7 @@ rec {
         version = "0.3.30";
         edition = "2018";
         sha256 = "07dh08gs9vfll2h36kq32q9xd86xm6lyl9xikmmwlkqnmrrgqxm5";
+        libName = "futures_executor";
         dependencies = [
           {
             name = "futures-core";
@@ -1783,6 +1840,7 @@ rec {
         version = "0.3.30";
         edition = "2018";
         sha256 = "1hgh25isvsr4ybibywhr4dpys8mjnscw4wfxxwca70cn1gi26im4";
+        libName = "futures_io";
         features = {
           "default" = [ "std" ];
         };
@@ -1794,6 +1852,7 @@ rec {
         edition = "2018";
         sha256 = "1b49qh9d402y8nka4q6wvvj0c88qq91wbr192mdn5h54nzs0qxc7";
         procMacro = true;
+        libName = "futures_macro";
         dependencies = [
           {
             name = "proc-macro2";
@@ -1816,6 +1875,7 @@ rec {
         version = "0.3.30";
         edition = "2018";
         sha256 = "1dag8xyyaya8n8mh8smx7x6w2dpmafg2din145v973a3hw7f1f4z";
+        libName = "futures_sink";
         features = {
           "default" = [ "std" ];
           "std" = [ "alloc" ];
@@ -1827,6 +1887,7 @@ rec {
         version = "0.3.30";
         edition = "2018";
         sha256 = "013h1724454hj8qczp8vvs10qfiqrxr937qsrv6rhii68ahlzn1q";
+        libName = "futures_task";
         features = {
           "default" = [ "std" ];
           "std" = [ "alloc" ];
@@ -1838,6 +1899,7 @@ rec {
         version = "0.3.30";
         edition = "2018";
         sha256 = "0j0xqhcir1zf2dcbpd421kgw6wvsk0rpxflylcysn1rlp3g02r1x";
+        libName = "futures_util";
         dependencies = [
           {
             name = "futures-channel";
@@ -2091,6 +2153,7 @@ rec {
         version = "0.3.5";
         edition = "2021";
         sha256 = "1hw2bxkzyvr0rbnpj0lkasi8h8qf3lyb63hp760cn22fjqaj3inh";
+        libName = "hermit_abi";
         authors = [
           "Stefan Lankes"
         ];
@@ -2125,6 +2188,7 @@ rec {
         version = "0.1.60";
         edition = "2018";
         sha256 = "0hdid5xz3jznm04lysjm3vi93h3c523w0hcc3xba47jl3ddbpzz7";
+        libName = "iana_time_zone";
         authors = [
           "Andrew Straw <strawman@astraw.com>"
           "Renรฉ Kijewski <rene.kijewski@fu-berlin.de>"
@@ -2170,6 +2234,7 @@ rec {
         version = "0.1.2";
         edition = "2018";
         sha256 = "17r6jmj31chn7xs9698r122mapq85mfnv98bb4pg6spm0si2f67k";
+        libName = "iana_time_zone_haiku";
         authors = [
           "Renรฉ Kijewski <crates.io@k6i.de>"
         ];
@@ -2243,6 +2308,7 @@ rec {
         version = "0.3.68";
         edition = "2018";
         sha256 = "1vm98fhnhs4w6yakchi9ip7ar95900k9vkr24a21qlwd6r5xlv20";
+        libName = "js_sys";
         authors = [
           "The wasm-bindgen Developers"
         ];
@@ -2284,6 +2350,33 @@ rec {
         };
         resolvedDefaultFeatures = [ "default" ];
       };
+      "libmimalloc-sys" = rec {
+        crateName = "libmimalloc-sys";
+        version = "0.1.39";
+        edition = "2018";
+        links = "mimalloc";
+        sha256 = "0i3b0dzz7cp0ik7ys66q92r16va78gwlbrnxhj5fnkdxsc8niai3";
+        libName = "libmimalloc_sys";
+        authors = [
+          "Octavian Oncescu <octavonce@gmail.com>"
+        ];
+        dependencies = [
+          {
+            name = "libc";
+            packageId = "libc";
+          }
+        ];
+        buildDependencies = [
+          {
+            name = "cc";
+            packageId = "cc";
+          }
+        ];
+        features = {
+          "cty" = [ "dep:cty" ];
+          "extended" = [ "cty" ];
+        };
+      };
       "lock_api" = rec {
         crateName = "lock_api";
         version = "0.4.11";
@@ -2360,6 +2453,7 @@ rec {
         edition = "2015";
         links = "lz4";
         sha256 = "0059ik4xlvnss5qfh6l691psk4g3350ljxaykzv10yr0gqqppljp";
+        libName = "lz4_sys";
         authors = [
           "Jens Heyens <jens.heyens@ewetel.net>"
           "Artem V. Navrotskiy <bozaro@buzzsoft.ru>"
@@ -2419,11 +2513,40 @@ rec {
           "stable_deref_trait" = [ "dep:stable_deref_trait" ];
         };
       };
+      "mimalloc" = rec {
+        crateName = "mimalloc";
+        version = "0.1.43";
+        edition = "2018";
+        sha256 = "0csnyrxc16i592gm5ffham07jyj2w98qsh9jyy1rv59lmr8474b8";
+        authors = [
+          "Octavian Oncescu <octavonce@gmail.com>"
+          "Vincent Rouillรฉ <vincent@speedy37.fr>"
+          "Thom Chiovoloni <chiovolonit@gmail.com>"
+        ];
+        dependencies = [
+          {
+            name = "libmimalloc-sys";
+            packageId = "libmimalloc-sys";
+            usesDefaultFeatures = false;
+          }
+        ];
+        features = {
+          "debug" = [ "libmimalloc-sys/debug" ];
+          "debug_in_debug" = [ "libmimalloc-sys/debug_in_debug" ];
+          "extended" = [ "libmimalloc-sys/extended" ];
+          "local_dynamic_tls" = [ "libmimalloc-sys/local_dynamic_tls" ];
+          "no_thp" = [ "libmimalloc-sys/no_thp" ];
+          "override" = [ "libmimalloc-sys/override" ];
+          "secure" = [ "libmimalloc-sys/secure" ];
+        };
+        resolvedDefaultFeatures = [ "default" ];
+      };
       "minimal-lexical" = rec {
         crateName = "minimal-lexical";
         version = "0.2.1";
         edition = "2018";
         sha256 = "16ppc5g84aijpri4jzv14rvcnslvlpphbszc7zzp6vfkddf4qdb8";
+        libName = "minimal_lexical";
         authors = [
           "Alex Huszagh <ahuszagh@gmail.com>"
         ];
@@ -2461,9 +2584,9 @@ rec {
       };
       "mio" = rec {
         crateName = "mio";
-        version = "0.8.10";
+        version = "0.8.11";
         edition = "2018";
-        sha256 = "02gyaxvaia9zzi4drrw59k9s0j6pa5d1y2kv7iplwjipdqlhngcg";
+        sha256 = "034byyl0ardml5yliy1hmvx8arkmn9rv479pid794sm07ia519m4";
         authors = [
           "Carl Lerche <me@carllerche.com>"
           "Thomas de Zeeuw <thomasdezeeuw@gmail.com>"
@@ -2530,6 +2653,7 @@ rec {
         edition = "2021";
         sha256 = "1j1avbxw7jscyi7dmnywhlwbiny1fvg1vpp9fy4dc1pd022kva16";
         procMacro = true;
+        libName = "multiversion_macros";
         authors = [
           "Caleb Zulawski <caleb.zulawski@gmail.com>"
         ];
@@ -2562,12 +2686,8 @@ rec {
         version = "0.1.0";
         edition = "2021";
         crateBin = [ ];
-        # We can't filter paths with references in Nix 2.4
-        # See https://github.com/NixOS/nix/issues/5410
-        src =
-          if ((lib.versionOlder builtins.nixVersion "2.4pre20211007") || (lib.versionOlder "2.5" builtins.nixVersion))
-          then lib.cleanSourceWith { filter = sourceFilter; src = ../../nix-compat; }
-          else ../../nix-compat;
+        src = lib.cleanSourceWith { filter = sourceFilter; src = ../../nix-compat; };
+        libName = "nix_compat";
         dependencies = [
           {
             name = "bitflags";
@@ -2579,6 +2699,11 @@ rec {
             features = [ "alloc" "unicode" "serde" ];
           }
           {
+            name = "bytes";
+            packageId = "bytes";
+            optional = true;
+          }
+          {
             name = "data-encoding";
             packageId = "data-encoding";
           }
@@ -2591,14 +2716,31 @@ rec {
             packageId = "ed25519-dalek";
           }
           {
+            name = "enum-primitive-derive";
+            packageId = "enum-primitive-derive";
+          }
+          {
             name = "glob";
             packageId = "glob";
           }
           {
+            name = "mimalloc";
+            packageId = "mimalloc";
+          }
+          {
             name = "nom";
             packageId = "nom";
           }
           {
+            name = "num-traits";
+            packageId = "num-traits";
+          }
+          {
+            name = "pin-project-lite";
+            packageId = "pin-project-lite";
+            optional = true;
+          }
+          {
             name = "serde";
             packageId = "serde";
             features = [ "derive" ];
@@ -2615,17 +2757,36 @@ rec {
             name = "thiserror";
             packageId = "thiserror";
           }
+          {
+            name = "tokio";
+            packageId = "tokio";
+            optional = true;
+            features = [ "io-util" "macros" ];
+          }
+          {
+            name = "tracing";
+            packageId = "tracing";
+          }
         ];
         devDependencies = [
           {
+            name = "mimalloc";
+            packageId = "mimalloc";
+          }
+          {
             name = "serde_json";
             packageId = "serde_json";
           }
         ];
         features = {
-          "async" = [ "futures-util" ];
-          "futures-util" = [ "dep:futures-util" ];
+          "async" = [ "tokio" ];
+          "bytes" = [ "dep:bytes" ];
+          "default" = [ "async" "wire" ];
+          "pin-project-lite" = [ "dep:pin-project-lite" ];
+          "tokio" = [ "dep:tokio" ];
+          "wire" = [ "tokio" "pin-project-lite" "bytes" ];
         };
+        resolvedDefaultFeatures = [ "async" "bytes" "default" "pin-project-lite" "tokio" "wire" ];
       };
       "nom" = rec {
         crateName = "nom";
@@ -2697,6 +2858,7 @@ rec {
         version = "0.2.18";
         edition = "2018";
         sha256 = "0yjib8p2p9kzmaz48xwhs69w5dh1wipph9jgnillzd2x33jz03fs";
+        libName = "num_traits";
         authors = [
           "The Rust Project Developers"
         ];
@@ -2880,6 +3042,7 @@ rec {
         version = "0.2.4";
         edition = "2021";
         sha256 = "07wf6wf4jrxlq5p3xldxsnabp7jl06my2qp7kiwy9m3x2r5wac8i";
+        libName = "parquet_format_safe";
         authors = [
           "Apache Thrift contributors <dev@thrift.apache.org>"
           "Jorge Leitao <jorgecarleitao@gmail.com>"
@@ -2909,6 +3072,7 @@ rec {
         version = "2.3.1";
         edition = "2018";
         sha256 = "0gi8wgx0dcy8rnv1kywdv98lwcx67hz0a0zwpib5v2i08r88y573";
+        libName = "percent_encoding";
         authors = [
           "The rust-url developers"
         ];
@@ -2923,6 +3087,7 @@ rec {
         version = "0.2.13";
         edition = "2018";
         sha256 = "0n0bwr5qxlf0mhn2xkl36sy55118s9qmvx2yl5f3ixkb007lbywa";
+        libName = "pin_project_lite";
 
       };
       "pin-utils" = rec {
@@ -2930,6 +3095,7 @@ rec {
         version = "0.1.0";
         edition = "2018";
         sha256 = "117ir7vslsl2z1a7qzhws4pd01cg2d3338c47swjyvqv2n60v1wb";
+        libName = "pin_utils";
         authors = [
           "Josef Brandl <mail@josefbrandl.de>"
         ];
@@ -2974,6 +3140,7 @@ rec {
         version = "0.3.29";
         edition = "2015";
         sha256 = "1jy6158v1316khkpmq2sjj1vgbnbnw51wffx7p0k0l9h9vlys019";
+        libName = "pkg_config";
         authors = [
           "Alex Crichton <alex@alexcrichton.com>"
         ];
@@ -2995,21 +3162,6 @@ rec {
         };
         resolvedDefaultFeatures = [ "default" "std" ];
       };
-      "platforms" = rec {
-        crateName = "platforms";
-        version = "3.3.0";
-        edition = "2018";
-        sha256 = "0k7q6pigmnvgpfasvssb12m2pv3pc94zrhrfg9by3h3wmhyfqvb2";
-        authors = [
-          "Tony Arcieri <bascule@gmail.com>"
-          "Sergey \"Shnatsel\" Davidoff <shnatsel@gmail.com>"
-        ];
-        features = {
-          "default" = [ "std" ];
-          "serde" = [ "dep:serde" ];
-        };
-        resolvedDefaultFeatures = [ "default" "std" ];
-      };
       "polars" = rec {
         crateName = "polars";
         version = "0.36.2";
@@ -3216,6 +3368,7 @@ rec {
         version = "0.36.2";
         edition = "2021";
         sha256 = "0vxql6amvwyp6qj0w87vm21y33qa0i61pshs4ry7ixwgd4ps0s6f";
+        libName = "polars_arrow";
         authors = [
           "Jorge C. Leitao <jorgecarleitao@gmail.com>"
           "Apache Arrow <dev@arrow.apache.org>"
@@ -3277,7 +3430,7 @@ rec {
           {
             name = "getrandom";
             packageId = "getrandom";
-            target = { target, features }: (pkgs.rust.lib.toRustTarget stdenv.hostPlatform == "wasm32-unknown-unknown");
+            target = { target, features }: (stdenv.hostPlatform.rust.rustcTarget == "wasm32-unknown-unknown");
             features = [ "js" ];
           }
           {
@@ -3394,6 +3547,7 @@ rec {
         version = "0.36.2";
         edition = "2021";
         sha256 = "1pa4l1w41gdzdff81ih1z05z3gz568k81i6flibbca8v2igvqkxi";
+        libName = "polars_compute";
         authors = [
           "Ritchie Vink <ritchie46@gmail.com>"
         ];
@@ -3433,6 +3587,7 @@ rec {
         version = "0.36.2";
         edition = "2021";
         sha256 = "08sar9h97znpb8ziyvrrvvx28lyzc21vwsd7gvwybjxn6kkyzxfh";
+        libName = "polars_core";
         authors = [
           "Ritchie Vink <ritchie46@gmail.com>"
         ];
@@ -3593,6 +3748,7 @@ rec {
         version = "0.36.2";
         edition = "2021";
         sha256 = "11m80a899kp45b3dz9jbvrn5001hw8izbdp7d2czrswrixwdx5k3";
+        libName = "polars_error";
         authors = [
           "Ritchie Vink <ritchie46@gmail.com>"
         ];
@@ -3629,6 +3785,7 @@ rec {
         version = "0.36.2";
         edition = "2021";
         sha256 = "1smamd34ghlxyxdd4aqdplk7k8xm1l626brmzlc4fvwlx3pmh13x";
+        libName = "polars_io";
         authors = [
           "Ritchie Vink <ritchie46@gmail.com>"
         ];
@@ -3825,6 +3982,7 @@ rec {
         version = "0.36.2";
         edition = "2021";
         sha256 = "1mdml4hs574njb23mb9gl6x92x2bb4vdfzsazkl3ifq516s0awcx";
+        libName = "polars_lazy";
         authors = [
           "Ritchie Vink <ritchie46@gmail.com>"
         ];
@@ -4019,6 +4177,7 @@ rec {
         version = "0.36.2";
         edition = "2021";
         sha256 = "13m7dh4vpdmcah04a7kql933l7y7045f0hybbmgff4dbav2ay29f";
+        libName = "polars_ops";
         authors = [
           "Ritchie Vink <ritchie46@gmail.com>"
         ];
@@ -4170,6 +4329,7 @@ rec {
         version = "0.36.2";
         edition = "2021";
         sha256 = "0gay037sw5hcg2q93pxcfh7krcchl1px8g838d8vhjpnn5klv8kv";
+        libName = "polars_parquet";
         authors = [
           "Jorge C. Leitao <jorgecarleitao@gmail.com>"
           "Apache Arrow <dev@arrow.apache.org>"
@@ -4290,6 +4450,7 @@ rec {
         version = "0.36.2";
         edition = "2021";
         sha256 = "00217q51mnq7i57k3sax293xnwghm5hridbpgl11fffcfg8fmdyr";
+        libName = "polars_pipe";
         authors = [
           "Ritchie Vink <ritchie46@gmail.com>"
         ];
@@ -4403,6 +4564,7 @@ rec {
         version = "0.36.2";
         edition = "2021";
         sha256 = "1l5ca38v7ksq4595wdr6wsd74pdgszwivq9y8wfc6l6h4ib1fjiq";
+        libName = "polars_plan";
         authors = [
           "Ritchie Vink <ritchie46@gmail.com>"
         ];
@@ -4591,6 +4753,7 @@ rec {
         version = "0.36.2";
         edition = "2021";
         sha256 = "0by7x6jlj5dwr3f1gj49v8w65l3kpqhwhzb9qzlv6gdqrdx2ycij";
+        libName = "polars_row";
         authors = [
           "Ritchie Vink <ritchie46@gmail.com>"
         ];
@@ -4620,6 +4783,7 @@ rec {
         version = "0.36.2";
         edition = "2021";
         sha256 = "1dcmm993gycw75a6c5hxcr6h2cy6fa2r3hsbx19h9zgxvxnlq2wz";
+        libName = "polars_sql";
         authors = [
           "Ritchie Vink <ritchie46@gmail.com>"
         ];
@@ -4684,6 +4848,7 @@ rec {
         version = "0.36.2";
         edition = "2021";
         sha256 = "1l24fmpv5v1qshxfd4592z8x6bnjlwy4njhf9rcbdlbbr6gn9qny";
+        libName = "polars_time";
         authors = [
           "Ritchie Vink <ritchie46@gmail.com>"
         ];
@@ -4762,6 +4927,7 @@ rec {
         version = "0.36.2";
         edition = "2021";
         sha256 = "1nmvfqwyzbaxcw457amspz479y5vcnpalq043awxfixdfx5clx5i";
+        libName = "polars_utils";
         authors = [
           "Ritchie Vink <ritchie46@gmail.com>"
         ];
@@ -4829,6 +4995,7 @@ rec {
         version = "0.2.17";
         edition = "2018";
         sha256 = "1pp6g52aw970adv3x2310n7glqnji96z0a9wiamzw89ibf0ayh2v";
+        libName = "ppv_lite86";
         authors = [
           "The CryptoCorrosion Contributors"
         ];
@@ -4842,6 +5009,7 @@ rec {
         version = "1.0.78";
         edition = "2021";
         sha256 = "1bjak27pqdn4f4ih1c9nr3manzyavsgqmf76ygw9k76q8pb2lhp2";
+        libName = "proc_macro2";
         authors = [
           "David Tolnay <dtolnay@gmail.com>"
           "Alex Crichton <alex@alexcrichton.com>"
@@ -5045,6 +5213,7 @@ rec {
         edition = "2021";
         links = "rayon-core";
         sha256 = "1qpwim68ai5h0j7axa8ai8z0payaawv3id0lrgkqmapx7lx8fr8l";
+        libName = "rayon_core";
         authors = [
           "Niko Matsakis <niko@alum.mit.edu>"
           "Josh Stone <cuviper@gmail.com>"
@@ -5146,6 +5315,7 @@ rec {
         version = "0.4.5";
         edition = "2021";
         sha256 = "1karc80mx15z435rm1jg3sqylnc58nxi15gqypcd1inkzzpqgfav";
+        libName = "regex_automata";
         authors = [
           "The Rust Project Developers"
           "Andrew Gallant <jamslam@gmail.com>"
@@ -5206,6 +5376,7 @@ rec {
         version = "0.8.2";
         edition = "2021";
         sha256 = "17rd2s8xbiyf6lb4aj2nfi44zqlj98g2ays8zzj2vfs743k79360";
+        libName = "regex_syntax";
         authors = [
           "The Rust Project Developers"
           "Andrew Gallant <jamslam@gmail.com>"
@@ -5222,6 +5393,7 @@ rec {
         version = "0.1.23";
         edition = "2015";
         sha256 = "0xnbk2bmyzshacjm2g1kd4zzv2y2az14bw3sjccq5qkpmsfvn9nn";
+        libName = "rustc_demangle";
         authors = [
           "Alex Crichton <alex@alexcrichton.com>"
         ];
@@ -5304,6 +5476,7 @@ rec {
         edition = "2018";
         sha256 = "1d50kbaslrrd0374ivx15jg57f03y5xzil1wd2ajlvajzlkbzw53";
         procMacro = true;
+        libName = "seq_macro";
         authors = [
           "David Tolnay <dtolnay@gmail.com>"
         ];
@@ -5461,6 +5634,7 @@ rec {
         version = "1.4.1";
         edition = "2015";
         sha256 = "18crkkw5k82bvcx088xlf5g4n3772m24qhzgfan80nda7d3rn8nq";
+        libName = "signal_hook_registry";
         authors = [
           "Michal 'vorner' Vaner <vorner@vorner.cz>"
           "Masaki Hara <ackie.h.gmai@gmail.com>"
@@ -5699,6 +5873,7 @@ rec {
         version = "0.1.2";
         edition = "2018";
         sha256 = "1wscqj3s30qknda778wf7z99mknk65p0h9hhs658l4pvkfqw6v5z";
+        libName = "streaming_decompression";
         dependencies = [
           {
             name = "fallible-streaming-iterator";
@@ -5712,6 +5887,7 @@ rec {
         version = "0.1.9";
         edition = "2021";
         sha256 = "0845zdv8qb7zwqzglpqc0830i43xh3fb6vqms155wz85qfvk28ib";
+        libName = "streaming_iterator";
         authors = [
           "Steven Fackler <sfackler@gmail.com>"
         ];
@@ -5914,6 +6090,7 @@ rec {
         version = "0.1.5";
         edition = "2021";
         sha256 = "1gb974chm9aj8ifkyibylxkyb5an4bf5y8dxb18pqmck698gmdfg";
+        libName = "target_features";
         authors = [
           "Caleb Zulawski <caleb.zulawski@gmail.com>"
         ];
@@ -5941,6 +6118,7 @@ rec {
         edition = "2021";
         sha256 = "0w9ldp8fa574ilz4dn7y7scpcq66vdjy59qal8qdpwsh7faal3zs";
         procMacro = true;
+        libName = "thiserror_impl";
         authors = [
           "David Tolnay <dtolnay@gmail.com>"
         ];
@@ -6076,6 +6254,7 @@ rec {
         edition = "2021";
         sha256 = "0fwjy4vdx1h9pi4g2nml72wi0fr27b5m954p13ji9anyy8l1x2jv";
         procMacro = true;
+        libName = "tokio_macros";
         authors = [
           "Tokio Contributors <team@tokio.rs>"
         ];
@@ -6101,6 +6280,7 @@ rec {
         version = "0.7.10";
         edition = "2021";
         sha256 = "058y6x4mf0fsqji9rfyb77qbfyc50y4pk2spqgj6xsyr693z66al";
+        libName = "tokio_util";
         authors = [
           "Tokio Contributors <team@tokio.rs>"
         ];
@@ -6151,6 +6331,96 @@ rec {
         };
         resolvedDefaultFeatures = [ "default" "io" "io-util" ];
       };
+      "tracing" = rec {
+        crateName = "tracing";
+        version = "0.1.40";
+        edition = "2018";
+        sha256 = "1vv48dac9zgj9650pg2b4d0j3w6f3x9gbggf43scq5hrlysklln3";
+        authors = [
+          "Eliza Weisman <eliza@buoyant.io>"
+          "Tokio Contributors <team@tokio.rs>"
+        ];
+        dependencies = [
+          {
+            name = "pin-project-lite";
+            packageId = "pin-project-lite";
+          }
+          {
+            name = "tracing-attributes";
+            packageId = "tracing-attributes";
+            optional = true;
+          }
+          {
+            name = "tracing-core";
+            packageId = "tracing-core";
+            usesDefaultFeatures = false;
+          }
+        ];
+        features = {
+          "attributes" = [ "tracing-attributes" ];
+          "default" = [ "std" "attributes" ];
+          "log" = [ "dep:log" ];
+          "log-always" = [ "log" ];
+          "std" = [ "tracing-core/std" ];
+          "tracing-attributes" = [ "dep:tracing-attributes" ];
+          "valuable" = [ "tracing-core/valuable" ];
+        };
+        resolvedDefaultFeatures = [ "attributes" "default" "std" "tracing-attributes" ];
+      };
+      "tracing-attributes" = rec {
+        crateName = "tracing-attributes";
+        version = "0.1.27";
+        edition = "2018";
+        sha256 = "1rvb5dn9z6d0xdj14r403z0af0bbaqhg02hq4jc97g5wds6lqw1l";
+        procMacro = true;
+        libName = "tracing_attributes";
+        authors = [
+          "Tokio Contributors <team@tokio.rs>"
+          "Eliza Weisman <eliza@buoyant.io>"
+          "David Barsky <dbarsky@amazon.com>"
+        ];
+        dependencies = [
+          {
+            name = "proc-macro2";
+            packageId = "proc-macro2";
+          }
+          {
+            name = "quote";
+            packageId = "quote";
+          }
+          {
+            name = "syn";
+            packageId = "syn 2.0.48";
+            usesDefaultFeatures = false;
+            features = [ "full" "parsing" "printing" "visit-mut" "clone-impls" "extra-traits" "proc-macro" ];
+          }
+        ];
+        features = { };
+      };
+      "tracing-core" = rec {
+        crateName = "tracing-core";
+        version = "0.1.32";
+        edition = "2018";
+        sha256 = "0m5aglin3cdwxpvbg6kz0r9r0k31j48n0kcfwsp6l49z26k3svf0";
+        libName = "tracing_core";
+        authors = [
+          "Tokio Contributors <team@tokio.rs>"
+        ];
+        dependencies = [
+          {
+            name = "once_cell";
+            packageId = "once_cell";
+            optional = true;
+          }
+        ];
+        features = {
+          "default" = [ "std" "valuable/std" ];
+          "once_cell" = [ "dep:once_cell" ];
+          "std" = [ "once_cell" ];
+          "valuable" = [ "dep:valuable" ];
+        };
+        resolvedDefaultFeatures = [ "once_cell" "std" ];
+      };
       "typenum" = rec {
         crateName = "typenum";
         version = "1.17.0";
@@ -6171,6 +6441,7 @@ rec {
         version = "1.0.12";
         edition = "2018";
         sha256 = "0jzf1znfpb2gx8nr8mvmyqs1crnv79l57nxnbiszc7xf7ynbjm1k";
+        libName = "unicode_ident";
         authors = [
           "David Tolnay <dtolnay@gmail.com>"
         ];
@@ -6181,6 +6452,7 @@ rec {
         version = "0.1.11";
         edition = "2015";
         sha256 = "11ds4ydhg8g7l06rlmh712q41qsrd0j0h00n1jm74kww3kqk65z5";
+        libName = "unicode_width";
         authors = [
           "kwantam <kwantam@gmail.com>"
           "Manish Goregaokar <manishsmail@gmail.com>"
@@ -6225,6 +6497,7 @@ rec {
         version = "0.2.91";
         edition = "2018";
         sha256 = "0zwbb07ln4m5hh6axamc701nnj090nd66syxbf6bagzf189j9qf1";
+        libName = "wasm_bindgen";
         authors = [
           "The wasm-bindgen Developers"
         ];
@@ -6256,6 +6529,7 @@ rec {
         version = "0.2.91";
         edition = "2018";
         sha256 = "02zpi9sjzhd8kfv1yj9m1bs4a41ik9ii5bc8hjf60arm1j8f3ry9";
+        libName = "wasm_bindgen_backend";
         authors = [
           "The wasm-bindgen Developers"
         ];
@@ -6301,6 +6575,7 @@ rec {
         edition = "2018";
         sha256 = "1va6dilw9kcnvsg5043h5b9mwc5sgq0lyhj9fif2n62qsgigj2mk";
         procMacro = true;
+        libName = "wasm_bindgen_macro";
         authors = [
           "The wasm-bindgen Developers"
         ];
@@ -6325,6 +6600,7 @@ rec {
         version = "0.2.91";
         edition = "2018";
         sha256 = "0rlyl3yzwbcnc691mvx78m1wbqf1qs52mlc3g88bh7ihwrdk4bv4";
+        libName = "wasm_bindgen_macro_support";
         authors = [
           "The wasm-bindgen Developers"
         ];
@@ -6363,6 +6639,7 @@ rec {
         edition = "2018";
         links = "wasm_bindgen";
         sha256 = "0f4qmjv57ppwi4xpdxgcd77vz9vmvlrnybg8dj430hzhvk96n62g";
+        libName = "wasm_bindgen_shared";
         authors = [
           "The wasm-bindgen Developers"
         ];
@@ -6384,12 +6661,7 @@ rec {
             requiredFeatures = [ ];
           }
         ];
-        # We can't filter paths with references in Nix 2.4
-        # See https://github.com/NixOS/nix/issues/5410
-        src =
-          if ((lib.versionOlder builtins.nixVersion "2.4pre20211007") || (lib.versionOlder "2.5" builtins.nixVersion))
-          then lib.cleanSourceWith { filter = sourceFilter; src = ./.; }
-          else ./.;
+        src = lib.cleanSourceWith { filter = sourceFilter; src = ./.; };
         dependencies = [
           {
             name = "anyhow";
@@ -6437,12 +6709,12 @@ rec {
           {
             name = "winapi-i686-pc-windows-gnu";
             packageId = "winapi-i686-pc-windows-gnu";
-            target = { target, features }: (pkgs.rust.lib.toRustTarget stdenv.hostPlatform == "i686-pc-windows-gnu");
+            target = { target, features }: (stdenv.hostPlatform.rust.rustcTarget == "i686-pc-windows-gnu");
           }
           {
             name = "winapi-x86_64-pc-windows-gnu";
             packageId = "winapi-x86_64-pc-windows-gnu";
-            target = { target, features }: (pkgs.rust.lib.toRustTarget stdenv.hostPlatform == "x86_64-pc-windows-gnu");
+            target = { target, features }: (stdenv.hostPlatform.rust.rustcTarget == "x86_64-pc-windows-gnu");
           }
         ];
         features = {
@@ -6455,6 +6727,7 @@ rec {
         version = "0.4.0";
         edition = "2015";
         sha256 = "1dmpa6mvcvzz16zg6d5vrfy4bxgg541wxrcip7cnshi06v38ffxc";
+        libName = "winapi_i686_pc_windows_gnu";
         authors = [
           "Peter Atashian <retep998@gmail.com>"
         ];
@@ -6465,6 +6738,7 @@ rec {
         version = "0.4.0";
         edition = "2015";
         sha256 = "0gqq64czqb64kskjryj8isp62m2sgvx25yyj3kpc2myh85w24bki";
+        libName = "winapi_x86_64_pc_windows_gnu";
         authors = [
           "Peter Atashian <retep998@gmail.com>"
         ];
@@ -7156,6 +7430,7 @@ rec {
         version = "0.52.0";
         edition = "2021";
         sha256 = "1nc3qv7sy24x0nlnb32f7alzpd6f72l4p24vl65vydbyil669ark";
+        libName = "windows_core";
         authors = [
           "Microsoft"
         ];
@@ -7173,6 +7448,7 @@ rec {
         version = "0.48.0";
         edition = "2018";
         sha256 = "1aan23v5gs7gya1lc46hqn9mdh8yph3fhxmhxlw36pn6pqc28zb7";
+        libName = "windows_sys";
         authors = [
           "Microsoft"
         ];
@@ -7466,6 +7742,7 @@ rec {
         version = "0.52.0";
         edition = "2021";
         sha256 = "0gd3v4ji88490zgb6b5mq5zgbvwv7zx1ibn8v3x83rwcdbryaar8";
+        libName = "windows_sys";
         authors = [
           "Microsoft"
         ];
@@ -7713,6 +7990,7 @@ rec {
         version = "0.48.5";
         edition = "2018";
         sha256 = "034ljxqshifs1lan89xwpcy1hp0lhdh4b5n0d2z4fwjx2piacbws";
+        libName = "windows_targets";
         authors = [
           "Microsoft"
         ];
@@ -7720,7 +7998,7 @@ rec {
           {
             name = "windows_aarch64_gnullvm";
             packageId = "windows_aarch64_gnullvm 0.48.5";
-            target = { target, features }: (pkgs.rust.lib.toRustTarget stdenv.hostPlatform == "aarch64-pc-windows-gnullvm");
+            target = { target, features }: (stdenv.hostPlatform.rust.rustcTarget == "aarch64-pc-windows-gnullvm");
           }
           {
             name = "windows_aarch64_msvc";
@@ -7745,7 +8023,7 @@ rec {
           {
             name = "windows_x86_64_gnullvm";
             packageId = "windows_x86_64_gnullvm 0.48.5";
-            target = { target, features }: (pkgs.rust.lib.toRustTarget stdenv.hostPlatform == "x86_64-pc-windows-gnullvm");
+            target = { target, features }: (stdenv.hostPlatform.rust.rustcTarget == "x86_64-pc-windows-gnullvm");
           }
           {
             name = "windows_x86_64_msvc";
@@ -7760,6 +8038,7 @@ rec {
         version = "0.52.0";
         edition = "2021";
         sha256 = "1kg7a27ynzw8zz3krdgy6w5gbqcji27j1sz4p7xk2j5j8082064a";
+        libName = "windows_targets";
         authors = [
           "Microsoft"
         ];
@@ -7767,7 +8046,7 @@ rec {
           {
             name = "windows_aarch64_gnullvm";
             packageId = "windows_aarch64_gnullvm 0.52.0";
-            target = { target, features }: (pkgs.rust.lib.toRustTarget stdenv.hostPlatform == "aarch64-pc-windows-gnullvm");
+            target = { target, features }: (stdenv.hostPlatform.rust.rustcTarget == "aarch64-pc-windows-gnullvm");
           }
           {
             name = "windows_aarch64_msvc";
@@ -7792,7 +8071,7 @@ rec {
           {
             name = "windows_x86_64_gnullvm";
             packageId = "windows_x86_64_gnullvm 0.52.0";
-            target = { target, features }: (pkgs.rust.lib.toRustTarget stdenv.hostPlatform == "x86_64-pc-windows-gnullvm");
+            target = { target, features }: (stdenv.hostPlatform.rust.rustcTarget == "x86_64-pc-windows-gnullvm");
           }
           {
             name = "windows_x86_64_msvc";
@@ -7947,6 +8226,7 @@ rec {
         version = "0.8.8";
         edition = "2018";
         sha256 = "0q9xl4kxibh61631lw9m7if7pkdvq3pp5ss52zdkxs6rirkhdgjk";
+        libName = "xxhash_rust";
         authors = [
           "Douman <douman@gmx.se>"
         ];
@@ -7995,6 +8275,7 @@ rec {
         edition = "2018";
         sha256 = "19nj11md42aijyqnfx8pa647fjzhz537xyc624rajwwfrn6b3qcw";
         procMacro = true;
+        libName = "zerocopy_derive";
         authors = [
           "Joshua Liebow-Feeser <joshlf@google.com>"
         ];
@@ -8069,6 +8350,7 @@ rec {
         version = "7.0.0";
         edition = "2018";
         sha256 = "0gpav2lcibrpmyslmjkcn3w0w64qif3jjljd2h8lr4p249s7qx23";
+        libName = "zstd_safe";
         authors = [
           "Alexandre Bury <alexandre.bury@gmail.com>"
         ];
@@ -8102,6 +8384,7 @@ rec {
         edition = "2018";
         links = "zstd";
         sha256 = "0mk6a2367swdi22zg03lcackpnvgq96d7120awd4i83lm2lfy5ly";
+        libName = "zstd_sys";
         authors = [
           "Alexandre Bury <alexandre.bury@gmail.com>"
         ];
@@ -8137,14 +8420,11 @@ rec {
       fuchsia = true;
       test = false;
 
-      /* We are choosing an arbitrary rust version to grab `lib` from,
-      which is unfortunate, but `lib` has been version-agnostic the
-      whole time so this is good enough for now.
-      */
-      os = pkgs.rust.lib.toTargetOs platform;
-      arch = pkgs.rust.lib.toTargetArch platform;
-      family = pkgs.rust.lib.toTargetFamily platform;
-      vendor = pkgs.rust.lib.toTargetVendor platform;
+      inherit (platform.rust.platform)
+        arch
+        os
+        vendor;
+      family = platform.rust.platform.target-family;
       env = "gnu";
       endian =
         if platform.parsed.cpu.significantByte.name == "littleEndian"
@@ -8221,6 +8501,7 @@ rec {
               (
                 _: {
                   buildTests = true;
+                  release = false;
                 }
               );
             # If the user hasn't set any pre/post commands, we don't want to
@@ -8233,51 +8514,41 @@ rec {
                 testPostRun
               ]);
           in
-          pkgs.runCommand "run-tests-${testCrate.name}"
-            {
-              inherit testCrateFlags;
-              buildInputs = testInputs;
-            } ''
-            set -e
+          pkgs.stdenvNoCC.mkDerivation {
+            name = "run-tests-${testCrate.name}";
 
-            export RUST_BACKTRACE=1
+            inherit (crate) src;
 
-            # recreate a file hierarchy as when running tests with cargo
+            inherit testCrateFlags;
 
-            # the source for test data
-            # It's necessary to locate the source in $NIX_BUILD_TOP/source/
-            # instead of $NIX_BUILD_TOP/
-            # because we compiled those test binaries in the former and not the latter.
-            # So all paths will expect source tree to be there and not in the build top directly.
-            # For example: $NIX_BUILD_TOP := /build in general, if you ask yourself.
-            # TODO(raitobezarius): I believe there could be more edge cases if `crate.sourceRoot`
-            # do exist but it's very hard to reason about them, so let's wait until the first bug report.
-            mkdir -p source/
-            cd source/
+            buildInputs = testInputs;
 
-            ${pkgs.buildPackages.xorg.lndir}/bin/lndir ${crate.src}
+            buildPhase = ''
+              set -e
+              export RUST_BACKTRACE=1
 
-            # build outputs
-            testRoot=target/debug
-            mkdir -p $testRoot
+              # build outputs
+              testRoot=target/debug
+              mkdir -p $testRoot
 
-            # executables of the crate
-            # we copy to prevent std::env::current_exe() to resolve to a store location
-            for i in ${crate}/bin/*; do
-              cp "$i" "$testRoot"
-            done
-            chmod +w -R .
+              # executables of the crate
+              # we copy to prevent std::env::current_exe() to resolve to a store location
+              for i in ${crate}/bin/*; do
+                cp "$i" "$testRoot"
+              done
+              chmod +w -R .
 
-            # test harness executables are suffixed with a hash, like cargo does
-            # this allows to prevent name collision with the main
-            # executables of the crate
-            hash=$(basename $out)
-            for file in ${drv}/tests/*; do
-              f=$testRoot/$(basename $file)-$hash
-              cp $file $f
-              ${testCommand}
-            done
-          '';
+              # test harness executables are suffixed with a hash, like cargo does
+              # this allows to prevent name collision with the main
+              # executables of the crate
+              hash=$(basename $out)
+              for file in ${drv}/tests/*; do
+                f=$testRoot/$(basename $file)-$hash
+                cp $file $f
+                ${testCommand}
+              done
+            '';
+          };
       in
       pkgs.runCommand "${crate.name}-linked"
         {
@@ -8386,7 +8657,7 @@ rec {
             let
               self = {
                 crates = lib.mapAttrs (packageId: value: buildByPackageIdForPkgsImpl self pkgs packageId) crateConfigs;
-                target = makeTarget pkgs.stdenv.hostPlatform;
+                target = makeTarget stdenv.hostPlatform;
                 build = mkBuiltByPackageIdByPkgs pkgs.buildPackages;
               };
             in
@@ -8461,8 +8732,6 @@ rec {
             buildRustCrateForPkgsFunc pkgs
               (
                 crateConfig // {
-                  # https://github.com/NixOS/nixpkgs/issues/218712
-                  dontStrip = stdenv.hostPlatform.isDarwin;
                   src = crateConfig.src or (
                     pkgs.fetchurl rec {
                       name = "${crateConfig.crateName}-${crateConfig.version}.tar.gz";
diff --git a/tvix/tools/weave/default.nix b/tvix/tools/weave/default.nix
index 5a84d7f64287..3d979b6d3327 100644
--- a/tvix/tools/weave/default.nix
+++ b/tvix/tools/weave/default.nix
@@ -1,3 +1,11 @@
-{ pkgs, ... }:
+{ pkgs, depot, ... }:
 
-(pkgs.callPackage ./Cargo.nix { }).rootCrate.build
+(pkgs.callPackage ./Cargo.nix {
+  defaultCrateOverrides = (depot.tvix.utils.defaultCrateOverridesForPkgs pkgs) // {
+    weave = prev: {
+      src = depot.tvix.utils.filterRustCrateSrc { root = prev.src.origSrc; };
+    };
+  };
+}).rootCrate.build.overrideAttrs {
+  meta.ci.extraSteps.crate2nix-check = depot.tvix.utils.mkCrate2nixCheck ./Cargo.nix;
+}
diff --git a/tvix/tracing/Cargo.toml b/tvix/tracing/Cargo.toml
new file mode 100644
index 000000000000..acd968eb231f
--- /dev/null
+++ b/tvix/tracing/Cargo.toml
@@ -0,0 +1,54 @@
+[package]
+name = "tvix-tracing"
+version = "0.1.0"
+edition = "2021"
+
+[dependencies]
+lazy_static = { workspace = true }
+tracing = { workspace = true, features = ["max_level_trace", "release_max_level_debug"] }
+tracing-subscriber = { workspace = true, features = ["env-filter"] }
+indicatif = { workspace = true }
+tracing-indicatif = { workspace = true }
+tokio = { workspace = true, features = ["sync", "rt"] }
+thiserror = { workspace = true }
+
+tracing-opentelemetry = { workspace = true, optional = true }
+opentelemetry = { workspace = true, optional = true }
+opentelemetry-otlp = { workspace = true, optional = true }
+opentelemetry_sdk = { workspace = true, features = ["rt-tokio"], optional = true }
+tracing-tracy = { workspace = true, features = ["flush-on-exit"], optional = true }
+opentelemetry-http = { workspace = true, optional = true }
+
+tonic = { workspace = true, optional = true }
+http  = { workspace = true, optional = true }
+
+reqwest-tracing = { workspace = true, optional = true }
+
+axum = { workspace = true, optional = true }
+
+[features]
+default = []
+otlp = [
+  "dep:tracing-opentelemetry",
+  "dep:opentelemetry",
+  "dep:opentelemetry-otlp",
+  "dep:opentelemetry_sdk",
+  "dep:opentelemetry-http",
+  "reqwest-tracing?/opentelemetry_0_22",
+]
+tracy = [
+  "dep:tracing-tracy"
+]
+tonic = [
+  "dep:tonic",
+  "dep:http",
+]
+reqwest = [
+  "dep:reqwest-tracing",
+]
+axum = [
+  "dep:axum",
+]
+
+[lints]
+workspace = true
diff --git a/tvix/tracing/default.nix b/tvix/tracing/default.nix
new file mode 100644
index 000000000000..b519d0ffc0b3
--- /dev/null
+++ b/tvix/tracing/default.nix
@@ -0,0 +1,11 @@
+{ depot, lib, ... }:
+
+(depot.tvix.crates.workspaceMembers.tvix-tracing.build.override {
+  runTests = true;
+}).overrideAttrs (old: rec {
+  meta.ci.targets = lib.filter (x: lib.hasPrefix "with-features" x || x == "no-features") (lib.attrNames passthru);
+  passthru = old.passthru // (depot.tvix.utils.mkFeaturePowerset {
+    inherit (old) crateName;
+    features = [ "otlp" "tracy" "tonic" "reqwest" "axum" ];
+  });
+})
diff --git a/tvix/tracing/src/lib.rs b/tvix/tracing/src/lib.rs
new file mode 100644
index 000000000000..fa9723d8cecc
--- /dev/null
+++ b/tvix/tracing/src/lib.rs
@@ -0,0 +1,333 @@
+use indicatif::ProgressStyle;
+use lazy_static::lazy_static;
+use tokio::sync::{mpsc, oneshot};
+use tracing::Level;
+use tracing_indicatif::{filter::IndicatifFilter, writer, IndicatifLayer, IndicatifWriter};
+use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt, EnvFilter, Layer};
+
+#[cfg(feature = "otlp")]
+use opentelemetry::{
+    trace::{Tracer, TracerProvider},
+    KeyValue,
+};
+#[cfg(feature = "otlp")]
+use opentelemetry_sdk::{
+    propagation::TraceContextPropagator,
+    resource::{ResourceDetector, SdkProvidedResourceDetector},
+    trace::BatchConfigBuilder,
+    Resource,
+};
+#[cfg(feature = "tracy")]
+use tracing_tracy::TracyLayer;
+
+pub mod propagate;
+
+lazy_static! {
+    pub static ref PB_PROGRESS_STYLE: ProgressStyle = ProgressStyle::with_template(
+        "{span_child_prefix} {wide_msg} {bar:10} ({elapsed}) {pos:>7}/{len:7}"
+    )
+    .expect("invalid progress template");
+    pub static ref PB_TRANSFER_STYLE: ProgressStyle = ProgressStyle::with_template(
+        "{span_child_prefix} {wide_msg} {binary_bytes:>7}/{binary_total_bytes:7}@{decimal_bytes_per_sec} ({elapsed}) {bar:10} "
+    )
+    .expect("invalid progress template");
+    pub static ref PB_SPINNER_STYLE: ProgressStyle = ProgressStyle::with_template(
+        "{span_child_prefix}{spinner} {wide_msg} ({elapsed}) {pos:>7}/{len:7}"
+    )
+    .expect("invalid progress template");
+}
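
A minimal sketch of how a consumer might attach one of these styles, assuming
tracing-indicatif's span extension trait; the span name and length are made up.
Note that IndicatifFilter::new(false) (used in build() below) hides bars unless
`indicatif.pb_show` is set on the span:

    use tracing_indicatif::span_ext::IndicatifSpanExt;

    // hypothetical span name; the pb_show field opts it in to progress rendering
    let span = tracing::info_span!("ingest", indicatif.pb_show = tracing::field::Empty);
    span.pb_set_style(&PB_PROGRESS_STYLE);
    span.pb_set_length(42);
    let _guard = span.enter();
    // ... per-item work, bumping the bar as we go ...
    tracing::Span::current().pb_inc(1);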
+
+#[derive(thiserror::Error, Debug)]
+pub enum Error {
+    #[error(transparent)]
+    Init(#[from] tracing_subscriber::util::TryInitError),
+
+    #[error(transparent)]
+    MpscSend(#[from] mpsc::error::SendError<Option<oneshot::Sender<()>>>),
+
+    #[error(transparent)]
+    OneshotRecv(#[from] oneshot::error::RecvError),
+}
+
+#[derive(Clone)]
+pub struct TracingHandle {
+    tx: Option<mpsc::Sender<Option<oneshot::Sender<()>>>>,
+
+    stdout_writer: IndicatifWriter<writer::Stdout>,
+    stderr_writer: IndicatifWriter<writer::Stderr>,
+}
+
+impl TracingHandle {
+    /// Returns a writer for [std::io::Stdout] that ensures its output will not be clobbered by
+    /// active progress bars.
+    ///
+    /// Instead of `println!(...)` prefer `writeln!(handle.get_stdout_writer(), ...)`.
+    pub fn get_stdout_writer(&self) -> IndicatifWriter<writer::Stdout> {
+        // clone is fine here because it's only a wrapper over an `Arc`
+        self.stdout_writer.clone()
+    }
+
+    /// Returns a writer for [std::io::Stderr] that ensures its output will not be clobbered by
+    /// active progress bars.
+    ///
+    /// Instead of `println!(...)` prefer `writeln!(handle.get_stderr_writer(), ...)`.
+    pub fn get_stderr_writer(&self) -> IndicatifWriter<writer::Stderr> {
+        // clone is fine here because it's only a wrapper over an `Arc`
+        self.stderr_writer.clone()
+    }
+
+    /// This will flush any attached tracing providers, e.g. the otlp exporter, if enabled.
+    /// If none are enabled, this is a noop.
+    ///
+    /// It will not wait until the flush is complete, but you can pass in a oneshot::Sender which
+    /// will receive a message once the flush is completed.
+    pub async fn flush(&self, msg: Option<oneshot::Sender<()>>) -> Result<(), Error> {
+        if let Some(tx) = &self.tx {
+            Ok(tx.send(msg).await?)
+        } else {
+            // If we have a message passed in we need to notify the receiver
+            if let Some(tx) = msg {
+                let _ = tx.send(());
+            }
+            Ok(())
+        }
+    }
+
+    /// This will flush all attached tracing providers and wait until the flush is completed.
+    /// If no tracing providers like otlp are attached, this is a noop.
+    ///
+    /// This should only be called on a regular shutdown.
+    /// If you need to shut down tracing on ctrl_c, use [force_shutdown](#method.force_shutdown)
+    /// instead, otherwise you will get otlp errors.
+    pub async fn shutdown(&self) -> Result<(), Error> {
+        let (tx, rx) = tokio::sync::oneshot::channel();
+        self.flush(Some(tx)).await?;
+        rx.await?;
+        Ok(())
+    }
+
+    /// This will flush all attached tracing providers and wait until the flush is completed.
+    /// After this it will do some other necessary cleanup.
+    /// If no tracing providers like otlp are attached, this is a noop.
+    ///
+    /// This should only be used if the tool received a ctrl_c, otherwise you will get otlp errors.
+    /// If you need to shut down tracing on a regular exit, you should use the [shutdown](#method.shutdown)
+    /// method.
+    pub async fn force_shutdown(&self) -> Result<(), Error> {
+        let (tx, rx) = tokio::sync::oneshot::channel();
+        self.flush(Some(tx)).await?;
+        rx.await?;
+
+        #[cfg(feature = "otlp")]
+        {
+            // Because of a bug within otlp we currently have to use spawn_blocking otherwise
+            // calling `shutdown_tracer_provider` can block forever. See
+            // https://github.com/open-telemetry/opentelemetry-rust/issues/1395#issuecomment-1953280335
+            //
+            // This still throws an error if the tool exits regularly: "OpenTelemetry trace error
+            // occurred. oneshot canceled", but not having this leads to errors if we cancel with
+            // ctrl_c.
+            // So right now this should only be used on ctrl_c; for a regular exit use the
+            // [shutdown](#shutdown) method.
+            let _ = tokio::task::spawn_blocking(move || {
+                opentelemetry::global::shutdown_tracer_provider();
+            })
+            .await;
+        }
+
+        Ok(())
+    }
+}
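
A sketch of choosing between the two shutdown paths, assuming a tokio runtime
with the signal feature; `do_work` is a hypothetical future standing in for the
tool's actual main loop:

    async fn run(handle: tvix_tracing::TracingHandle) -> Result<(), Box<dyn std::error::Error>> {
        tokio::select! {
            // ctrl_c needs the extra otlp cleanup, so use force_shutdown
            _ = tokio::signal::ctrl_c() => handle.force_shutdown().await?,
            // a regular exit only needs a flush-and-wait
            _ = do_work() => handle.shutdown().await?,
        }
        Ok(())
    }

    // hypothetical stand-in for the tool's actual main loop
    async fn do_work() { /* ... */ }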
+
+pub struct TracingBuilder {
+    level: Level,
+    progress_bar: bool,
+
+    #[cfg(feature = "otlp")]
+    service_name: Option<&'static str>,
+}
+
+impl Default for TracingBuilder {
+    fn default() -> Self {
+        TracingBuilder {
+            level: Level::INFO,
+            progress_bar: false,
+
+            #[cfg(feature = "otlp")]
+            service_name: None,
+        }
+    }
+}
+
+impl TracingBuilder {
+    /// Set the log level for all layers: stderr and otlp, if configured. RUST_LOG still takes
+    /// priority over this value.
+    pub fn level(mut self, level: Level) -> TracingBuilder {
+        self.level = level;
+        self
+    }
+
+    #[cfg(feature = "otlp")]
+    /// Enable otlp by setting a custom service_name
+    pub fn enable_otlp(mut self, service_name: &'static str) -> TracingBuilder {
+        self.service_name = Some(service_name);
+        self
+    }
+
+    /// Enable the progress bar layer; it is disabled by default.
+    pub fn enable_progressbar(mut self) -> TracingBuilder {
+        self.progress_bar = true;
+        self
+    }
+
+    /// This will set up tracing based on the configuration passed in.
+    /// It will set up a stderr writer output layer and an EnvFilter based on the provided log
+    /// level (RUST_LOG still takes priority over the configured value).
+    /// The EnvFilter will be applied to all configured layers, including otlp.
+    ///
+    /// It will also configure otlp if the feature is enabled and a service_name was provided. It
+    /// will then also set up a channel which is later used for flushing the provider.
+    pub fn build(self) -> Result<TracingHandle, Error> {
+        // Set up the tracing subscriber.
+        let indicatif_layer = IndicatifLayer::new().with_progress_style(PB_SPINNER_STYLE.clone());
+        let stdout_writer = indicatif_layer.get_stdout_writer();
+        let stderr_writer = indicatif_layer.get_stderr_writer();
+        let subscriber = tracing_subscriber::registry()
+            .with(
+                EnvFilter::builder()
+                    .with_default_directive(self.level.into())
+                    .from_env()
+                    .expect("invalid RUST_LOG"),
+            )
+            .with(
+                tracing_subscriber::fmt::Layer::new()
+                    .with_writer(indicatif_layer.get_stderr_writer())
+                    .compact(),
+            )
+            .with((self.progress_bar).then(|| {
+                indicatif_layer.with_filter(
+                    // only show progress for spans with indicatif.pb_show field being set
+                    IndicatifFilter::new(false),
+                )
+            }));
+
+        // Setup otlp if a service_name is configured
+        #[cfg(feature = "otlp")]
+        {
+            if let Some(service_name) = self.service_name {
+                // register a text map propagator for trace propagation
+                opentelemetry::global::set_text_map_propagator(TraceContextPropagator::new());
+
+                let (tracer, tx) = gen_otlp_tracer(service_name.to_string());
+                // Create a tracing layer with the configured tracer
+                let layer = tracing_opentelemetry::layer().with_tracer(tracer);
+
+                #[cfg(feature = "tracy")]
+                {
+                    subscriber
+                        .with(TracyLayer::default())
+                        .with(Some(layer))
+                        .try_init()?;
+                }
+
+                #[cfg(not(feature = "tracy"))]
+                {
+                    subscriber.with(Some(layer)).try_init()?;
+                }
+                return Ok(TracingHandle {
+                    tx: Some(tx),
+                    stdout_writer,
+                    stderr_writer,
+                });
+            }
+        }
+        #[cfg(feature = "tracy")]
+        {
+            subscriber.with(TracyLayer::default()).try_init()?;
+        }
+        #[cfg(not(feature = "tracy"))]
+        {
+            subscriber.try_init()?;
+        }
+
+        Ok(TracingHandle {
+            tx: None,
+            stdout_writer,
+            stderr_writer,
+        })
+    }
+}
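
For reference, a downstream binary would configure the builder once at startup.
A sketch, inside a Result-returning main; the service name is hypothetical:

    let _handle = tvix_tracing::TracingBuilder::default()
        .level(tracing::Level::DEBUG)
        .enable_progressbar()
        .enable_otlp("tvix-example") // hypothetical service name; requires the otlp feature
        .build()?;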
+
+/// Returns an OTLP tracer, and the TX part of a channel, which can be used
+/// to request flushes (and signal back the completion of the flush).
+#[cfg(feature = "otlp")]
+fn gen_otlp_tracer(
+    service_name: String,
+) -> (
+    impl Tracer + tracing_opentelemetry::PreSampledTracer,
+    mpsc::Sender<Option<oneshot::Sender<()>>>,
+) {
+    let tracer_provider = opentelemetry_otlp::new_pipeline()
+        .tracing()
+        .with_exporter(opentelemetry_otlp::new_exporter().tonic())
+        .with_batch_config(
+            BatchConfigBuilder::default()
+                // the default value for `max_export_batch_size` is 512, which we would fill
+                // pretty quickly, triggering an export. We want to make sure that
+                // the export is only done once the schedule is met and not as soon as 512 spans
+                // are collected.
+                .with_max_export_batch_size(4096)
+                // analog to default config `max_export_batch_size * 4`
+                .with_max_queue_size(4096 * 4)
+                // only force an export to the otlp collector every 10 seconds to reduce the amount
+                // of error messages if an otlp collector is not available
+                .with_scheduled_delay(std::time::Duration::from_secs(10))
+                .build(),
+        )
+        .with_trace_config(opentelemetry_sdk::trace::Config::default().with_resource({
+            // use SdkProvidedResourceDetector.detect to detect resources,
+            // but replace the default service name with our default.
+            // https://github.com/open-telemetry/opentelemetry-rust/issues/1298
+            let resources = SdkProvidedResourceDetector.detect(std::time::Duration::from_secs(0));
+            // SdkProvidedResourceDetector currently always sets
+            // `service.name`, but we don't like its default.
+            if resources.get("service.name".into()).unwrap() == "unknown_service".into() {
+                resources.merge(&Resource::new([KeyValue::new(
+                    "service.name",
+                    service_name,
+                )]))
+            } else {
+                resources
+            }
+        }))
+        .install_batch(opentelemetry_sdk::runtime::Tokio)
+        .expect("Failed to install batch exporter using Tokio");
+
+    // The tracer provider is needed for later use, like flushing.
+    // It is kept around so we can flush it for each message received on rx.
+    let tracer = tracer_provider.tracer("tvix");
+
+    // Set up a channel for flushing trace providers later
+    let (tx, mut rx) = mpsc::channel::<Option<oneshot::Sender<()>>>(16);
+
+    // Spawn a task that listens on rx for messages. Once we receive a message, we
+    // call force_flush on the tracer_provider.
+    tokio::spawn(async move {
+        while let Some(m) = rx.recv().await {
+            // Because of a bug within otlp we currently have to use spawn_blocking,
+            // otherwise calling `force_flush` will block forever, especially if the
+            // tool was closed with ctrl_c. See
+            // https://github.com/open-telemetry/opentelemetry-rust/issues/1395#issuecomment-1953280335
+            let _ = tokio::task::spawn_blocking({
+                let tracer_provider = tracer_provider.clone();
+                move || tracer_provider.force_flush()
+            })
+            .await;
+            if let Some(tx) = m {
+                let _ = tx.send(());
+            }
+        }
+    });
+
+    (tracer, tx)
+}
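
The channel protocol is deliberately small: sending None requests a
fire-and-forget flush, while sending Some(oneshot::Sender) additionally asks
for an acknowledgement once the flush finished. A sketch, inside an async
context, with `tx` being the sender returned above:

    // fire-and-forget: request a flush, but don't wait for completion
    tx.send(None).await?;

    // request a flush and wait until the background task acknowledges it
    let (ack_tx, ack_rx) = tokio::sync::oneshot::channel();
    tx.send(Some(ack_tx)).await?;
    ack_rx.await?;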
diff --git a/tvix/tracing/src/propagate/axum.rs b/tvix/tracing/src/propagate/axum.rs
new file mode 100644
index 000000000000..6d012f449762
--- /dev/null
+++ b/tvix/tracing/src/propagate/axum.rs
@@ -0,0 +1,48 @@
+#[cfg(feature = "otlp")]
+use opentelemetry::{global, propagation::Extractor};
+#[cfg(feature = "otlp")]
+use tracing_opentelemetry::OpenTelemetrySpanExt;
+
+// TODO: accept_trace can be shared with tonic, as soon as tonic upstream has a release with
+// support for axum07. Latest master already has support for axum07, but there is no release yet:
+// https://github.com/hyperium/tonic/pull/1740
+
+/// Trace context propagation: associate the current span with the otlp trace of the given request,
+/// if any and valid. This only sets the parent trace if the otlp feature is also enabled.
+pub fn accept_trace<B>(request: axum::http::Request<B>) -> axum::http::Request<B> {
+    // we only extract and set a parent trace if the otlp feature is enabled, otherwise this
+    // function is a noop and we return the request as-is
+    #[cfg(feature = "otlp")]
+    {
+        // Current context, if no or invalid data is received.
+        let parent_context = global::get_text_map_propagator(|propagator| {
+            propagator.extract(&HeaderExtractor(request.headers()))
+        });
+        tracing::Span::current().set_parent(parent_context);
+    }
+    request
+}
+
+/// Helper for extracting headers from HTTP Requests. This is used for OpenTelemetry context
+/// propagation over HTTP.
+#[cfg(feature = "otlp")]
+struct HeaderExtractor<'a>(&'a axum::http::HeaderMap);
+
+#[cfg(feature = "otlp")]
+impl<'a> Extractor for HeaderExtractor<'a> {
+    /// Get a value for a key from the HeaderMap.  If the value is not valid ASCII, returns None.
+    fn get(&self, key: &str) -> Option<&str> {
+        self.0.get(key).and_then(|v| {
+            let s = v.to_str();
+            if let Err(ref error) = s {
+                tracing::warn!(%error, ?v, "cannot convert header value to ASCII")
+            };
+            s.ok()
+        })
+    }
+
+    /// Collect all the keys from the HeaderMap.
+    fn keys(&self) -> Vec<&str> {
+        self.0.keys().map(|k| k.as_str()).collect()
+    }
+}
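
To make this concrete, a sketch of wiring accept_trace into an axum 0.7 router
via map_request; the route and handler are hypothetical:

    use axum::{extract::Request, middleware::map_request, routing::get, Router};
    use tvix_tracing::propagate::axum::accept_trace;

    // run accept_trace on every incoming request before it hits a handler
    async fn trace_request(req: Request) -> Request {
        accept_trace(req)
    }

    fn router() -> Router {
        Router::new()
            .route("/status", get(|| async { "ok" })) // hypothetical route
            .layer(map_request(trace_request))
    }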
diff --git a/tvix/tracing/src/propagate/mod.rs b/tvix/tracing/src/propagate/mod.rs
new file mode 100644
index 000000000000..2e56a832e5b7
--- /dev/null
+++ b/tvix/tracing/src/propagate/mod.rs
@@ -0,0 +1,8 @@
+#[cfg(feature = "tonic")]
+pub mod tonic;
+
+#[cfg(feature = "reqwest")]
+pub mod reqwest;
+
+#[cfg(feature = "axum")]
+pub mod axum;
diff --git a/tvix/tracing/src/propagate/reqwest.rs b/tvix/tracing/src/propagate/reqwest.rs
new file mode 100644
index 000000000000..e2afb3e948c7
--- /dev/null
+++ b/tvix/tracing/src/propagate/reqwest.rs
@@ -0,0 +1,13 @@
+use reqwest_tracing::{SpanBackendWithUrl, TracingMiddleware};
+
+/// Returns a new tracing middleware which can be used with reqwest_middleware.
+/// It will write the `traceparent` header on the request and additionally record the
+/// `url` in `http.url`.
+///
+/// If the otlp feature is disabled, this will not insert a `traceparent` header. It will
+/// effectively function as a noop.
+///
+/// `traceparent` => https://www.w3.org/TR/trace-context/#trace-context-http-headers-format
+pub fn tracing_middleware() -> TracingMiddleware<SpanBackendWithUrl> {
+    TracingMiddleware::<SpanBackendWithUrl>::new()
+}
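
A sketch of using it with reqwest-middleware; the URL is hypothetical:

    use tvix_tracing::propagate::reqwest::tracing_middleware;

    async fn fetch() -> Result<(), reqwest_middleware::Error> {
        let client = reqwest_middleware::ClientBuilder::new(reqwest::Client::new())
            .with(tracing_middleware())
            .build();
        // with otlp enabled, this request carries a traceparent header
        let _resp = client.get("https://cache.example.com/nix-cache-info").send().await?;
        Ok(())
    }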
diff --git a/tvix/tracing/src/propagate/tonic.rs b/tvix/tracing/src/propagate/tonic.rs
new file mode 100644
index 000000000000..75455c056617
--- /dev/null
+++ b/tvix/tracing/src/propagate/tonic.rs
@@ -0,0 +1,57 @@
+#[cfg(feature = "otlp")]
+use opentelemetry::{global, propagation::Injector};
+#[cfg(feature = "otlp")]
+use opentelemetry_http::HeaderExtractor;
+#[cfg(feature = "otlp")]
+use tracing_opentelemetry::OpenTelemetrySpanExt;
+
+/// Trace context propagation: associate the current span with the otlp trace of the given request,
+/// if any and valid. This only sets the parent trace if the otlp feature is also enabled.
+pub fn accept_trace<B>(request: http::Request<B>) -> http::Request<B> {
+    // we only extract and set a parent trace if the otlp feature is enabled, otherwise this
+    // function is a noop and we return the request as-is
+    #[cfg(feature = "otlp")]
+    {
+        // Current context, if no or invalid data is received.
+        let parent_context = global::get_text_map_propagator(|propagator| {
+            propagator.extract(&HeaderExtractor(request.headers()))
+        });
+        tracing::Span::current().set_parent(parent_context);
+    }
+    request
+}
+
+#[cfg(feature = "otlp")]
+struct MetadataInjector<'a>(&'a mut tonic::metadata::MetadataMap);
+
+#[cfg(feature = "otlp")]
+impl Injector for MetadataInjector<'_> {
+    fn set(&mut self, key: &str, value: String) {
+        use tonic::metadata::{MetadataKey, MetadataValue};
+        use tracing::warn;
+
+        match MetadataKey::from_bytes(key.as_bytes()) {
+            Ok(key) => match MetadataValue::try_from(&value) {
+                Ok(value) => {
+                    self.0.insert(key, value);
+                }
+                Err(error) => warn!(value, error = format!("{error:#}"), "parse metadata value"),
+            },
+            Err(error) => warn!(key, error = format!("{error:#}"), "parse metadata key"),
+        }
+    }
+}
+
+/// Trace context propagation: send the trace context by injecting it into the metadata of the given
+/// request. This only injects the current span if the otlp feature is also enabled.
+#[allow(unused_mut)]
+pub fn send_trace<T>(mut request: tonic::Request<T>) -> Result<tonic::Request<T>, tonic::Status> {
+    #[cfg(feature = "otlp")]
+    {
+        global::get_text_map_propagator(|propagator| {
+            let context = tracing::Span::current().context();
+            propagator.inject_context(&context, &mut MetadataInjector(request.metadata_mut()))
+        });
+    }
+    Ok(request)
+}
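
On the client side, send_trace already has the shape tonic expects of an
interceptor, so it can be passed to a generated client directly. A sketch,
with BlobServiceClient standing in for any tonic-generated client and a
made-up endpoint:

    use tvix_tracing::propagate::tonic::send_trace;

    async fn connect() -> Result<(), tonic::transport::Error> {
        let channel = tonic::transport::Endpoint::from_static("http://[::1]:8000")
            .connect()
            .await?;
        // BlobServiceClient stands in for any tonic-generated client;
        // every outgoing RPC now carries the current trace context
        let _client = BlobServiceClient::with_interceptor(channel, send_trace);
        Ok(())
    }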
diff --git a/tvix/utils.nix b/tvix/utils.nix
new file mode 100644
index 000000000000..6fa99e63eb05
--- /dev/null
+++ b/tvix/utils.nix
@@ -0,0 +1,160 @@
+{ pkgs, lib, depot, ... }:
+
+{
+  mkFeaturePowerset = { crateName, features, override ? { } }:
+    let
+      powerset = xs:
+        let
+          addElement = set: element:
+            set ++ map (e: [ element ] ++ e) set;
+        in
+        lib.foldl' addElement [ [ ] ] xs;
+    in
+    lib.listToAttrs (map
+      (featuresPowerset: {
+        name = if featuresPowerset != [ ] then "with-features-${lib.concatStringsSep "-" featuresPowerset}" else "no-features";
+        value = depot.tvix.crates.workspaceMembers.${crateName}.build.override (old: {
+          runTests = true;
+          features = featuresPowerset;
+        } // (if lib.isFunction override then override old else override)
+        );
+      })
+      (powerset features));
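
The powerset helper folds each feature into the set of subsets collected so
far: every existing subset is kept once unchanged and once with the new
element prepended. The same logic in Rust, for illustration only:

    fn powerset<T: Clone>(xs: &[T]) -> Vec<Vec<T>> {
        xs.iter().fold(vec![vec![]], |acc, x| {
            // keep every subset as-is, and add it once more with `x` prepended
            let mut out = acc.clone();
            out.extend(acc.into_iter().map(|mut s| {
                s.insert(0, x.clone());
                s
            }));
            out
        })
    }

    // powerset(&["otlp", "tracy"]) yields
    // [[], ["otlp"], ["tracy"], ["tracy", "otlp"]]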
+
+  # Filters the given source, only keeping files related to the build, preventing unnecessary rebuilds.
+  # Includes src in the root, all other .rs files, and optionally Cargo-specific files.
+  # Additional files to be included can be specified in extraFileset.
+  filterRustCrateSrc =
+    { root # The original src
+    , extraFileset ? null # Additional filesets to include (e.g. fileFilter for proto files)
+    , cargoSupport ? false
+    }:
+    lib.fileset.toSource {
+      inherit root;
+      fileset = lib.fileset.intersection
+        (lib.fileset.fromSource root) # We build our final fileset from the original src
+        (lib.fileset.unions ([
+          (lib.fileset.maybeMissing (root + "/src")) # src may be missing if the crate just has tests for example
+          (lib.fileset.fileFilter (f: f.hasExt "rs") root)
+        ] ++ lib.optionals cargoSupport [
+          (lib.fileset.fileFilter (f: f.name == "Cargo.toml") root)
+          (lib.fileset.maybeMissing (root + "/Cargo.lock"))
+        ] ++ lib.optional (extraFileset != null) extraFileset));
+    };
+
+  # A function which takes a pkgs instance and returns an overridden defaultCrateOverrides with support for tvix crates.
+  # This can be used throughout the rest of the repo.
+  defaultCrateOverridesForPkgs = pkgs:
+    let
+      commonDarwinDeps = with pkgs.darwin.apple_sdk.frameworks; [
+        Security
+        SystemConfiguration
+      ];
+    in
+    pkgs.defaultCrateOverrides // {
+      nar-bridge = prev: {
+        src = depot.tvix.utils.filterRustCrateSrc { root = prev.src.origSrc; };
+      };
+
+      nix-compat = prev: {
+        src = depot.tvix.utils.filterRustCrateSrc rec {
+          root = prev.src.origSrc;
+          extraFileset = root + "/testdata";
+        };
+      };
+
+      nix-compat-derive = prev: {
+        src = depot.tvix.utils.filterRustCrateSrc { root = prev.src.origSrc; };
+      };
+
+      nix-compat-derive-tests = prev: {
+        src = depot.tvix.utils.filterRustCrateSrc { root = prev.src.origSrc; };
+      };
+
+      tvix-build = prev: {
+        src = depot.tvix.utils.filterRustCrateSrc rec {
+          root = prev.src.origSrc;
+          extraFileset = lib.fileset.fileFilter (f: f.hasExt "proto") root;
+        };
+        PROTO_ROOT = depot.tvix.build.protos.protos;
+        nativeBuildInputs = [ pkgs.protobuf ];
+        buildInputs = lib.optionals pkgs.stdenv.isDarwin commonDarwinDeps;
+      };
+
+      tvix-castore = prev: {
+        src = depot.tvix.utils.filterRustCrateSrc rec {
+          root = prev.src.origSrc;
+          extraFileset = lib.fileset.fileFilter (f: f.hasExt "proto") root;
+        };
+        PROTO_ROOT = depot.tvix.castore.protos.protos;
+        nativeBuildInputs = [ pkgs.protobuf ];
+      };
+
+      tvix-cli = prev: {
+        src = depot.tvix.utils.filterRustCrateSrc rec {
+          root = prev.src.origSrc;
+          extraFileset = root + "/tests";
+        };
+        buildInputs = lib.optionals pkgs.stdenv.isDarwin commonDarwinDeps;
+      };
+
+      tvix-store = prev: {
+        src = depot.tvix.utils.filterRustCrateSrc rec {
+          root = prev.src.origSrc;
+          extraFileset = lib.fileset.fileFilter (f: f.hasExt "proto") root;
+        };
+        PROTO_ROOT = depot.tvix.store.protos.protos;
+        nativeBuildInputs = [ pkgs.protobuf ];
+        # fuse-backend-rs uses the DiskArbitration framework to handle mount/unmount on Darwin
+        buildInputs = lib.optionals pkgs.stdenv.isDarwin (commonDarwinDeps ++ [ pkgs.darwin.apple_sdk.frameworks.DiskArbitration ]);
+      };
+
+      tvix-eval-builtin-macros = prev: {
+        src = depot.tvix.utils.filterRustCrateSrc { root = prev.src.origSrc; };
+      };
+
+      tvix-eval = prev: {
+        src = depot.tvix.utils.filterRustCrateSrc rec {
+          root = prev.src.origSrc;
+          extraFileset = root + "/proptest-regressions";
+        };
+      };
+
+      tvix-glue = prev: {
+        src = depot.tvix.utils.filterRustCrateSrc {
+          root = prev.src.origSrc;
+        };
+      };
+
+      tvix-serde = prev: {
+        src = depot.tvix.utils.filterRustCrateSrc { root = prev.src.origSrc; };
+      };
+
+      tvix-tracing = prev: {
+        src = depot.tvix.utils.filterRustCrateSrc { root = prev.src.origSrc; };
+      };
+    };
+
+  # This creates an extraStep in CI to check whether the Cargo.nix file is up-to-date.
+  mkCrate2nixCheck =
+    path: # The path to the Cargo.nix to be checked.
+    let
+      relCrateRoot = lib.removePrefix "./" (builtins.dirOf (lib.path.removePrefix depot.path.origSrc path));
+    in
+    {
+      label = "crate2nix check for ${relCrateRoot}";
+      needsOutput = true;
+      alwaysRun = true;
+      command = pkgs.writeShellScript "crate2nix-check-for-${lib.replaceStrings [ "/" ] [ "-" ] relCrateRoot}" ''
+        (cd $(git rev-parse --show-toplevel)/${relCrateRoot} &&
+          ${depot.tools.crate2nix-generate}/bin/crate2nix-generate &&
+          if [[ -n "$(git status --porcelain -unormal Cargo.nix)" ]]; then
+              echo "----------------------------------------------------------------------------------------------------"
+              echo "Cargo.nix needs to be updated, run 'mg run //tools/crate2nix-generate' in ${relCrateRoot}"
+              echo "----------------------------------------------------------------------------------------------------"
+              exit 1
+          fi
+        )
+      '';
+    };
+}
diff --git a/tvix/verify-lang-tests/default.nix b/tvix/verify-lang-tests/default.nix
index 772c1c532078..e6de1c1f455c 100644
--- a/tvix/verify-lang-tests/default.nix
+++ b/tvix/verify-lang-tests/default.nix
@@ -9,7 +9,9 @@
 let
   testRoot = ../eval/src/tests;
 
-  inherit (pkgs.buildPackages) nix nix_latest;
+  inherit (pkgs.nixVersions) nix_2_3;
+  # The latest Nix version we've verified to work for our testing suite.
+  nix_latest_verified = pkgs.nixVersions.nix_2_24;
 
   parseTest = dir: baseName:
     let
@@ -44,50 +46,54 @@ let
     # C++ Nix can't TCO
     "eval-okay-tail-call-1.nix" = true;
     # Ordering change after 2.3
-    "eval-okay-xml.nix" = [ nix ];
+    "eval-okay-xml.nix" = [ nix_2_3 ];
     # Missing builtins in Nix 2.3
-    "eval-okay-ceil.nix" = [ nix ];
-    "eval-okay-floor-ceil.nix" = [ nix ];
-    "eval-okay-floor.nix" = [ nix ];
-    "eval-okay-groupBy.nix" = [ nix ];
-    "eval-okay-zipAttrsWith.nix" = [ nix ];
-    "eval-okay-builtins-group-by-propagate-catchable.nix" = [ nix ];
+    "eval-okay-ceil.nix" = [ nix_2_3 ];
+    "eval-okay-floor-ceil.nix" = [ nix_2_3 ];
+    "eval-okay-floor.nix" = [ nix_2_3 ];
+    "eval-okay-groupBy.nix" = [ nix_2_3 ];
+    "eval-okay-zipAttrsWith.nix" = [ nix_2_3 ];
+    "eval-okay-builtins-group-by-propagate-catchable.nix" = [ nix_2_3 ];
     # Comparable lists are not in Nix 2.3
-    "eval-okay-sort.nix" = [ nix ];
-    "eval-okay-compare-lists.nix" = [ nix ];
-    "eval-okay-value-pointer-compare.nix" = [ nix ];
-    "eval-okay-builtins-genericClosure-pointer-equality.nix" = [ nix ];
-    "eval-okay-list-comparison.nix" = [ nix ];
+    "eval-okay-sort.nix" = [ nix_2_3 ];
+    "eval-okay-compare-lists.nix" = [ nix_2_3 ];
+    "eval-okay-value-pointer-compare.nix" = [ nix_2_3 ];
+    "eval-okay-builtins-genericClosure-pointer-equality.nix" = [ nix_2_3 ];
+    "eval-okay-list-comparison.nix" = [ nix_2_3 ];
     # getAttrPos gains support for functionArgs-returned sets after 2.3
-    "eval-okay-getattrpos-functionargs.nix" = [ nix ];
+    "eval-okay-getattrpos-functionargs.nix" = [ nix_2_3 ];
     # groupBy appeared (long) after 2.3
-    "eval-okay-builtins-groupby-thunk.nix" = [ nix ];
+    "eval-okay-builtins-groupby-thunk.nix" = [ nix_2_3 ];
     # import is no longer considered a curried primop in Nix > 2.3
-    "eval-okay-import-display.nix" = [ nix ];
+    "eval-okay-import-display.nix" = [ nix_2_3 ];
     # Cycle detection and formatting changed sometime after Nix 2.3
-    "eval-okay-cycle-display-cpp-nix-2.13.nix" = [ nix ];
+    "eval-okay-cycle-display-cpp-nix-2.13.nix" = [ nix_2_3 ];
     # builtins.replaceStrings becomes lazier in Nix 2.16
-    "eval-okay-replacestrings.nix" = [ nix ];
-    # builtins.readFileType is added in Nix 2.15
-    "eval-okay-readFileType.nix" = [ nix ];
+    "eval-okay-replacestrings.nix" = [ nix_2_3 ];
+    # builtins.readFileType is added in Nix 2.14
+    "eval-okay-readFileType.nix" = [ nix_2_3 ];
     # builtins.fromTOML gains support for timestamps in Nix 2.16
-    "eval-okay-fromTOML-timestamps.nix" = [ nix ];
+    "eval-okay-fromTOML-timestamps.nix" = [ nix_2_3 ];
     # identifier formatting changed in Nix 2.17 due to cppnix commit
     # b72bc4a972fe568744d98b89d63adcd504cb586c
-    "eval-okay-identifier-formatting.nix" = [ nix ];
+    "eval-okay-identifier-formatting.nix" = [ nix_2_3 ];
 
     # Differing strictness in the function argument for some builtins in Nix 2.18
     # https://github.com/NixOS/nix/issues/9779
-    "eval-okay-builtins-map-propagate-catchable.nix" = [ nix_latest ];
-    "eval-okay-builtins-gen-list-propagate-catchable.nix" = [ nix_latest ];
+    "eval-okay-builtins-map-propagate-catchable.nix" = [ nix_latest_verified ];
+    "eval-okay-builtins-gen-list-propagate-catchable.nix" = [ nix_latest_verified ];
     "eval-okay-builtins-replace-strings-propagate-catchable.nix" =
-      [ nix_latest ];
-    "eval-okay-builtins-map-function-strictness.nix" = [ nix_latest ];
-    "eval-okay-builtins-genList-function-strictness.nix" = [ nix_latest ];
+      [ nix_latest_verified ];
+    "eval-okay-builtins-map-function-strictness.nix" = [ nix_latest_verified ];
+    "eval-okay-builtins-genList-function-strictness.nix" = [ nix_latest_verified ];
 
     # TODO(sterni): support diffing working directory and home relative paths
     # like C++ Nix test suite (using string replacement).
     "eval-okay-path-antiquotation.nix" = true;
+
+    # The output of dirOf (and maybe other filesystem builtins) has changed in Nix 2.23
+    # when they switched to using std::filesystem, https://github.com/NixOS/nix/pull/10658.
+    "eval-okay-dirof.nix" = [ nix_latest_verified ];
   };
 
   runCppNixLangTests = cpp-nix:
@@ -219,8 +225,11 @@ let
     };
 
 in
-
 depot.nix.readTree.drvTargets {
-  "nix-2.3" = runCppNixLangTests nix;
-  "nix-${lib.versions.majorMinor nix_latest.version}" = runCppNixLangTests nix_latest;
+  "nix-${lib.versions.majorMinor nix_2_3.version}" = runCppNixLangTests nix_2_3;
+  "nix-${lib.versions.majorMinor nix_latest_verified.version}" = lib.warnIf (lib.versionOlder nix_latest_verified.version pkgs.nixVersions.latest.version)
+    "The latest verified Nix version is out of date, consider updating the value of `nix_latest_verified` and verifying that the tests still pass."
+    runCppNixLangTests
+    nix_latest_verified;
 }
+