Diffstat (limited to 'tvix')
874 files changed, 50254 insertions, 0 deletions
diff --git a/tvix/.gitignore b/tvix/.gitignore
new file mode 100644
index 000000000000..93233dccf4d3
--- /dev/null
+++ b/tvix/.gitignore
@@ -0,0 +1,4 @@
+/target
+/result-*
+/result
+target
diff --git a/tvix/.vscode/extensions.json b/tvix/.vscode/extensions.json
new file mode 100644
index 000000000000..07405500414a
--- /dev/null
+++ b/tvix/.vscode/extensions.json
@@ -0,0 +1,8 @@
+{
+  "recommendations": [
+    "rust-lang.rust-analyzer"
+  ],
+  "unwantedRecommendations": [
+    "rust-lang.rust"
+  ]
+}
diff --git a/tvix/Cargo.lock b/tvix/Cargo.lock
new file mode 100644
index 000000000000..4716e0e66067
--- /dev/null
+++ b/tvix/Cargo.lock
@@ -0,0 +1,3104 @@
+# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
+version = 3
+
+[[package]]
+name = "aho-corasick"
+version = "1.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "67fc08ce920c31afb70f013dcce1bfc3a3195de6a228474e45e1f145b36f8d04"
+dependencies = [
+ "memchr",
+]
+
+[[package]]
+name = "anes"
+version = "0.1.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4b46cbb362ab8752921c97e041f5e366ee6297bd428a31275b9fcf1e380f7299"
+
+[[package]]
+name = "anstream"
+version = "0.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0ca84f3628370c59db74ee214b3263d58f9aadd9b4fe7e711fd87dc452b7f163"
+dependencies = [
+ "anstyle",
+ "anstyle-parse",
+ "anstyle-query",
+ "anstyle-wincon",
+ "colorchoice",
+ "is-terminal",
+ "utf8parse",
+]
+
+[[package]]
+name = "anstyle"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "41ed9a86bf92ae6580e0a31281f65a1b1d867c0cc68d5346e2ae128dddfa6a7d"
+
+[[package]]
+name = "anstyle-parse"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e765fd216e48e067936442276d1d57399e37bce53c264d6fefbe298080cb57ee"
+dependencies = [
+ "utf8parse",
+]
+
+[[package]]
+name = "anstyle-query"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5ca11d4be1bab0c8bc8734a9aa7bf4ee8316d462a08c6ac5052f888fef5b494b"
+dependencies = [
+ "windows-sys 0.48.0",
+]
+
+[[package]]
+name = "anstyle-wincon"
+version = "1.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "180abfa45703aebe0093f79badacc01b8fd4ea2e35118747e5811127f926e188"
+dependencies = [
+ "anstyle",
+ "windows-sys 0.48.0",
+]
+
+[[package]]
+name = "anyhow"
+version = "1.0.71"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9c7d0618f0e0b7e8ff11427422b64564d5fb0be1940354bfe2e0529b18a9d9b8"
[remaining tvix/Cargo.lock entries, "arrayref" through "tracing-core", are further auto-generated lockfile records; the diff view is truncated mid-entry at "tracing-core"]
"24eb03ba0eab1fd845050058ce5e616558e8f8d8fca633e6b163fe25c797213a" +dependencies = [ + "once_cell", + "valuable", +] + +[[package]] +name = "tracing-futures" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97d095ae15e245a057c8e8451bab9b3ee1e1f68e9ba2b4fbc18d0ac5237835f2" +dependencies = [ + "pin-project", + "tracing", +] + +[[package]] +name = "tracing-log" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78ddad33d2d10b1ed7eb9d1f518a5674713876e97e5bb9b7345a7984fbb4f922" +dependencies = [ + "lazy_static", + "log", + "tracing-core", +] + +[[package]] +name = "tracing-serde" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bc6b213177105856957181934e4920de57730fc69bf42c37ee5bb664d406d9e1" +dependencies = [ + "serde", + "tracing-core", +] + +[[package]] +name = "tracing-subscriber" +version = "0.3.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "30a651bc37f915e81f087d86e62a18eec5f79550c7faff886f7090b4ea757c77" +dependencies = [ + "nu-ansi-term", + "serde", + "serde_json", + "sharded-slab", + "smallvec", + "thread_local", + "tracing-core", + "tracing-log", + "tracing-serde", +] + +[[package]] +name = "try-lock" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3528ecfd12c466c6f163363caf2d02a71161dd5e1cc6ae7b34207ea2d42d81ed" + +[[package]] +name = "tvix-cli" +version = "0.1.0" +dependencies = [ + "clap 4.2.7", + "data-encoding", + "dirs", + "nix-compat", + "rustyline", + "smol_str", + "ssri", + "thiserror", + "tvix-eval", + "tvix-store", + "wu-manber", +] + +[[package]] +name = "tvix-eval" +version = "0.1.0" +dependencies = [ + "codemap", + "codemap-diagnostic", + "criterion", + "dirs", + "genawaiter", + "imbl", + "itertools", + "lazy_static", + "lexical-core", + "path-clean", + "pretty_assertions", + "proptest", + "regex", + "rnix", + "rowan", + "serde", + "serde_json", + "smol_str", + "tabwriter", + "tempdir", + "test-generator", + "test-strategy", + "toml", + "tvix-eval-builtin-macros", + "xml-rs", +] + +[[package]] +name = "tvix-eval-builtin-macros" +version = "0.0.1" +dependencies = [ + "proc-macro2 1.0.56", + "quote 1.0.26", + "syn 1.0.109", + "tvix-eval", +] + +[[package]] +name = "tvix-serde" +version = "0.1.0" +dependencies = [ + "serde", + "tvix-eval", +] + +[[package]] +name = "tvix-store" +version = "0.1.0" +dependencies = [ + "anyhow", + "blake3", + "bytes", + "clap 4.2.7", + "count-write", + "data-encoding", + "futures", + "lazy_static", + "nix-compat", + "prost", + "prost-build", + "rayon", + "serde_json", + "sha2 0.10.6", + "sled", + "smol_str", + "tempfile", + "test-case", + "thiserror", + "tokio", + "tokio-stream", + "tokio-util", + "tonic", + "tonic-build", + "tonic-mock", + "tonic-reflection", + "tower", + "tracing", + "tracing-subscriber", + "tvix-eval", + "walkdir", +] + +[[package]] +name = "typenum" +version = "1.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "497961ef93d974e23eb6f433eb5fe1b7930b659f06d12dec6fc44a8f554c0bba" + +[[package]] +name = "unarray" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eaea85b334db583fe3274d12b4cd1880032beab409c0d774be044d4480ab9a94" + +[[package]] +name = "unicode-ident" +version = "1.0.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"e5464a87b239f13a63a501f2701565754bae92d243d4bb7eb12f6d57d2269bf4" + +[[package]] +name = "unicode-segmentation" +version = "1.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1dd624098567895118886609431a7c3b8f516e41d30e0643f03d94592a147e36" + +[[package]] +name = "unicode-width" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c0edd1e5b14653f783770bce4a4dabb4a5108a5370a5f5d8cfe8710c361f6c8b" + +[[package]] +name = "unicode-xid" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc72304796d0818e357ead4e000d19c9c174ab23dc11093ac919054d20a6a7fc" + +[[package]] +name = "utf8parse" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a" + +[[package]] +name = "valuable" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d" + +[[package]] +name = "version_check" +version = "0.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" + +[[package]] +name = "wait-timeout" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9f200f5b12eb75f8c1ed65abd4b2db8a6e1b138a20de009dacee265a2498f3f6" +dependencies = [ + "libc", +] + +[[package]] +name = "walkdir" +version = "2.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "36df944cda56c7d8d8b7496af378e6b16de9284591917d307c9b4d313c44e698" +dependencies = [ + "same-file", + "winapi-util", +] + +[[package]] +name = "want" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ce8a968cb1cd110d136ff8b819a556d6fb6d919363c61534f6860c7eb172ba0" +dependencies = [ + "log", + "try-lock", +] + +[[package]] +name = "wasi" +version = "0.11.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" + +[[package]] +name = "wasm-bindgen" +version = "0.2.84" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "31f8dcbc21f30d9b8f2ea926ecb58f6b91192c17e9d33594b3df58b2007ca53b" +dependencies = [ + "cfg-if", + "wasm-bindgen-macro", +] + +[[package]] +name = "wasm-bindgen-backend" +version = "0.2.84" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "95ce90fd5bcc06af55a641a86428ee4229e44e07033963a2290a8e241607ccb9" +dependencies = [ + "bumpalo", + "log", + "once_cell", + "proc-macro2 1.0.56", + "quote 1.0.26", + "syn 1.0.109", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-macro" +version = "0.2.84" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c21f77c0bedc37fd5dc21f897894a5ca01e7bb159884559461862ae90c0b4c5" +dependencies = [ + "quote 1.0.26", + "wasm-bindgen-macro-support", +] + +[[package]] +name = "wasm-bindgen-macro-support" +version = "0.2.84" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2aff81306fcac3c7515ad4e177f521b5c9a15f2b08f4e32d823066102f35a5f6" +dependencies = [ + "proc-macro2 1.0.56", + "quote 1.0.26", + "syn 1.0.109", + "wasm-bindgen-backend", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-shared" +version = "0.2.84" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "0046fef7e28c3804e5e38bfa31ea2a0f73905319b677e57ebe37e49358989b5d" + +[[package]] +name = "web-sys" +version = "0.3.61" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e33b99f4b23ba3eec1a53ac264e35a755f00e966e0065077d6027c0f575b0b97" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "which" +version = "4.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2441c784c52b289a054b7201fc93253e288f094e2f4be9058343127c4226a269" +dependencies = [ + "either", + "libc", + "once_cell", +] + +[[package]] +name = "winapi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +dependencies = [ + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", +] + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" + +[[package]] +name = "winapi-util" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178" +dependencies = [ + "winapi", +] + +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" + +[[package]] +name = "windows-sys" +version = "0.45.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75283be5efb2831d37ea142365f009c02ec203cd29a3ebecbc093d52315b66d0" +dependencies = [ + "windows-targets 0.42.2", +] + +[[package]] +name = "windows-sys" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" +dependencies = [ + "windows-targets 0.48.0", +] + +[[package]] +name = "windows-targets" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e5180c00cd44c9b1c88adb3693291f1cd93605ded80c250a75d472756b4d071" +dependencies = [ + "windows_aarch64_gnullvm 0.42.2", + "windows_aarch64_msvc 0.42.2", + "windows_i686_gnu 0.42.2", + "windows_i686_msvc 0.42.2", + "windows_x86_64_gnu 0.42.2", + "windows_x86_64_gnullvm 0.42.2", + "windows_x86_64_msvc 0.42.2", +] + +[[package]] +name = "windows-targets" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b1eb6f0cd7c80c79759c929114ef071b87354ce476d9d94271031c0497adfd5" +dependencies = [ + "windows_aarch64_gnullvm 0.48.0", + "windows_aarch64_msvc 0.48.0", + "windows_i686_gnu 0.48.0", + "windows_i686_msvc 0.48.0", + "windows_x86_64_gnu 0.48.0", + "windows_x86_64_gnullvm 0.48.0", + "windows_x86_64_msvc 0.48.0", +] + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "597a5118570b68bc08d8d59125332c54f1ba9d9adeedeef5b99b02ba2b0698f8" + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91ae572e1b79dba883e0d315474df7305d12f569b400fcf90581b06062f7e1bc" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.42.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "e08e8864a60f06ef0d0ff4ba04124db8b0fb3be5776a5cd47641e942e58c4d43" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b2ef27e0d7bdfcfc7b868b317c1d32c641a6fe4629c171b8928c7b08d98d7cf3" + +[[package]] +name = "windows_i686_gnu" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c61d927d8da41da96a81f029489353e68739737d3beca43145c8afec9a31a84f" + +[[package]] +name = "windows_i686_gnu" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "622a1962a7db830d6fd0a69683c80a18fda201879f0f447f065a3b7467daa241" + +[[package]] +name = "windows_i686_msvc" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "44d840b6ec649f480a41c8d80f9c65108b92d89345dd94027bfe06ac444d1060" + +[[package]] +name = "windows_i686_msvc" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4542c6e364ce21bf45d69fdd2a8e455fa38d316158cfd43b3ac1c5b1b19f8e00" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8de912b8b8feb55c064867cf047dda097f92d51efad5b491dfb98f6bbb70cb36" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ca2b8a661f7628cbd23440e50b05d705db3686f894fc9580820623656af974b1" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26d41b46a36d453748aedef1486d5c7a85db22e56aff34643984ea85514e94a3" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7896dbc1f41e08872e9d5e8f8baa8fdd2677f29468c4e156210174edc7f7b953" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9aec5da331524158c6d1a4ac0ab1541149c0b9505fde06423b02f5ef0106b9f0" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a515f5799fe4961cb532f983ce2b23082366b898e52ffbce459c86f67c8378a" + +[[package]] +name = "wu-manber" +version = "0.1.0" +source = "git+https://github.com/tvlfyi/wu-manber.git#e77628cafcf45d41b9e455be86a6b1b9f46b2092" + +[[package]] +name = "xml-rs" +version = "0.8.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "699d0104bcdd7e7af6d093d6c6e2d0c479b8a129ee0d1023b31d2e0c71bfdda2" + +[[package]] +name = "yansi" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09041cd90cf85f7f8b2df60c646f853b7f535ce68f85244eb6731cf89fa498ec" + +[[package]] +name = "zstd" +version = "0.9.2+zstd.1.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2390ea1bf6c038c39674f22d95f0564725fc06034a47129179810b2fc58caa54" +dependencies = [ + "zstd-safe", +] + +[[package]] +name = "zstd-safe" +version = "4.1.3+zstd.1.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e99d81b99fb3c2c2c794e3fe56c305c63d5173a16a46b5850b07c935ffc7db79" +dependencies = [ + "libc", + "zstd-sys", +] + +[[package]] +name = "zstd-sys" +version = "1.6.2+zstd.1.5.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "2daf2f248d9ea44454bfcb2516534e8b8ad2fc91bf818a1885495fc42bc8ac9f" +dependencies = [ + "cc", + "libc", +] diff --git a/tvix/Cargo.nix b/tvix/Cargo.nix new file mode 100644 index 000000000000..332c2834f809 --- /dev/null +++ b/tvix/Cargo.nix @@ -0,0 +1,10624 @@ +# This file was @generated by crate2nix 0.10.0 with the command: +# "generate" +# See https://github.com/kolloch/crate2nix for more info. + +{ nixpkgs ? <nixpkgs> +, pkgs ? import nixpkgs { config = { }; } +, lib ? pkgs.lib +, stdenv ? pkgs.stdenv +, buildRustCrateForPkgs ? pkgs: pkgs.buildRustCrate + # This is used as the `crateOverrides` argument for `buildRustCrate`. +, defaultCrateOverrides ? pkgs.defaultCrateOverrides + # The features to enable for the root_crate or the workspace_members. +, rootFeatures ? [ "default" ] + # If true, throw errors instead of issueing deprecation warnings. +, strictDeprecation ? false + # Used for conditional compilation based on CPU feature detection. +, targetFeatures ? [ ] + # Whether to perform release builds: longer compile times, faster binaries. +, release ? true + # Additional crate2nix configuration if it exists. +, crateConfig ? if builtins.pathExists ./crate-config.nix + then pkgs.callPackage ./crate-config.nix { } + else { } +}: + +rec { + # + # "public" attributes that we attempt to keep stable with new versions of crate2nix. + # + + + # Refer your crate build derivation by name here. + # You can override the features with + # workspaceMembers."${crateName}".build.override { features = [ "default" "feature1" ... ]; }. + workspaceMembers = { + "nix-cli" = rec { + packageId = "nix-cli"; + build = internal.buildRustCrateWithFeatures { + packageId = "nix-cli"; + }; + + # Debug support which might change between releases. + # File a bug if you depend on any for non-debug work! + debug = internal.debugCrate { inherit packageId; }; + }; + "nix-compat" = rec { + packageId = "nix-compat"; + build = internal.buildRustCrateWithFeatures { + packageId = "nix-compat"; + }; + + # Debug support which might change between releases. + # File a bug if you depend on any for non-debug work! + debug = internal.debugCrate { inherit packageId; }; + }; + "tvix-cli" = rec { + packageId = "tvix-cli"; + build = internal.buildRustCrateWithFeatures { + packageId = "tvix-cli"; + }; + + # Debug support which might change between releases. + # File a bug if you depend on any for non-debug work! + debug = internal.debugCrate { inherit packageId; }; + }; + "tvix-eval" = rec { + packageId = "tvix-eval"; + build = internal.buildRustCrateWithFeatures { + packageId = "tvix-eval"; + }; + + # Debug support which might change between releases. + # File a bug if you depend on any for non-debug work! + debug = internal.debugCrate { inherit packageId; }; + }; + "tvix-eval-builtin-macros" = rec { + packageId = "tvix-eval-builtin-macros"; + build = internal.buildRustCrateWithFeatures { + packageId = "tvix-eval-builtin-macros"; + }; + + # Debug support which might change between releases. + # File a bug if you depend on any for non-debug work! + debug = internal.debugCrate { inherit packageId; }; + }; + "tvix-serde" = rec { + packageId = "tvix-serde"; + build = internal.buildRustCrateWithFeatures { + packageId = "tvix-serde"; + }; + + # Debug support which might change between releases. + # File a bug if you depend on any for non-debug work! 
+ debug = internal.debugCrate { inherit packageId; }; + }; + "tvix-store" = rec { + packageId = "tvix-store"; + build = internal.buildRustCrateWithFeatures { + packageId = "tvix-store"; + }; + + # Debug support which might change between releases. + # File a bug if you depend on any for non-debug work! + debug = internal.debugCrate { inherit packageId; }; + }; + }; + + # A derivation that joins the outputs of all workspace members together. + allWorkspaceMembers = pkgs.symlinkJoin { + name = "all-workspace-members"; + paths = + let members = builtins.attrValues workspaceMembers; + in builtins.map (m: m.build) members; + }; + + # + # "internal" ("private") attributes that may change in every new version of crate2nix. + # + + internal = rec { + # Build and dependency information for crates. + # Many of the fields are passed one-to-one to buildRustCrate. + # + # Noteworthy: + # * `dependencies`/`buildDependencies`: similar to the corresponding fields for buildRustCrate. + # but with additional information which is used during dependency/feature resolution. + # * `resolvedDependencies`: the selected default features reported by cargo - only included for debugging. + # * `devDependencies` as of now not used by `buildRustCrate` but used to + # inject test dependencies into the build + + crates = { + "aho-corasick" = rec { + crateName = "aho-corasick"; + version = "1.0.1"; + edition = "2021"; + sha256 = "014ddyrlbwg18m74fa52wrfik8y3pzhwqg811yvsyc8cjb70iz37"; + libName = "aho_corasick"; + authors = [ + "Andrew Gallant <jamslam@gmail.com>" + ]; + dependencies = [ + { + name = "memchr"; + packageId = "memchr"; + optional = true; + usesDefaultFeatures = false; + } + ]; + features = { + "default" = [ "std" "perf-literal" ]; + "logging" = [ "dep:log" ]; + "perf-literal" = [ "dep:memchr" ]; + "std" = [ "memchr?/std" ]; + }; + resolvedDefaultFeatures = [ "default" "perf-literal" "std" ]; + }; + "anes" = rec { + crateName = "anes"; + version = "0.1.6"; + edition = "2018"; + sha256 = "16bj1ww1xkwzbckk32j2pnbn5vk6wgsl3q4p3j9551xbcarwnijb"; + authors = [ + "Robert Vojta <rvojta@me.com>" + ]; + features = { + "bitflags" = [ "dep:bitflags" ]; + "parser" = [ "bitflags" ]; + }; + resolvedDefaultFeatures = [ "default" ]; + }; + "anstream" = rec { + crateName = "anstream"; + version = "0.3.2"; + edition = "2021"; + sha256 = "0qzinx9c8zfq3xqpxzmlv6nrm3ymccr4n8gffkdmj31p50v4za0c"; + dependencies = [ + { + name = "anstyle"; + packageId = "anstyle"; + } + { + name = "anstyle-parse"; + packageId = "anstyle-parse"; + } + { + name = "anstyle-query"; + packageId = "anstyle-query"; + optional = true; + } + { + name = "anstyle-wincon"; + packageId = "anstyle-wincon"; + optional = true; + target = { target, features }: (target."windows" or false); + } + { + name = "colorchoice"; + packageId = "colorchoice"; + optional = true; + } + { + name = "is-terminal"; + packageId = "is-terminal"; + optional = true; + } + { + name = "utf8parse"; + packageId = "utf8parse"; + } + ]; + features = { + "auto" = [ "dep:anstyle-query" "dep:colorchoice" "dep:is-terminal" ]; + "default" = [ "auto" "wincon" ]; + "wincon" = [ "dep:anstyle-wincon" ]; + }; + resolvedDefaultFeatures = [ "auto" "default" "wincon" ]; + }; + "anstyle" = rec { + crateName = "anstyle"; + version = "1.0.0"; + edition = "2021"; + sha256 = "0zbazbfqs4mfw93573f61iy8c78vbbv824m3w206bbljpy39mva1"; + features = { + "default" = [ "std" ]; + }; + resolvedDefaultFeatures = [ "default" "std" ]; + }; + "anstyle-parse" = rec { + crateName = "anstyle-parse"; + version = "0.2.0"; + 
edition = "2021"; + sha256 = "1vjprf080adyxxpls9iwwny3g7irawfns9s2cj9ngq28dqhzsrg7"; + dependencies = [ + { + name = "utf8parse"; + packageId = "utf8parse"; + optional = true; + } + ]; + features = { + "core" = [ "dep:arrayvec" ]; + "default" = [ "utf8" ]; + "utf8" = [ "dep:utf8parse" ]; + }; + resolvedDefaultFeatures = [ "default" "utf8" ]; + }; + "anstyle-query" = rec { + crateName = "anstyle-query"; + version = "1.0.0"; + edition = "2021"; + sha256 = "0js9bgpqz21g0p2nm350cba1d0zfyixsma9lhyycic5sw55iv8aw"; + dependencies = [ + { + name = "windows-sys"; + packageId = "windows-sys 0.48.0"; + target = { target, features }: (target."windows" or false); + features = [ "Win32_System_Console" "Win32_Foundation" ]; + } + ]; + + }; + "anstyle-wincon" = rec { + crateName = "anstyle-wincon"; + version = "1.0.1"; + edition = "2021"; + sha256 = "12714vwjf4c1wm3qf49m5vmd93qvq2nav6zpjc0bxbh3ayjby2hq"; + dependencies = [ + { + name = "anstyle"; + packageId = "anstyle"; + } + { + name = "windows-sys"; + packageId = "windows-sys 0.48.0"; + target = { target, features }: (target."windows" or false); + features = [ "Win32_System_Console" "Win32_Foundation" ]; + } + ]; + + }; + "anyhow" = rec { + crateName = "anyhow"; + version = "1.0.71"; + edition = "2018"; + sha256 = "1f6rm4c9nlp0wazm80wlw45zpmb48nv24x2227zyidz0y0c0czcw"; + authors = [ + "David Tolnay <dtolnay@gmail.com>" + ]; + features = { + "backtrace" = [ "dep:backtrace" ]; + "default" = [ "std" ]; + }; + resolvedDefaultFeatures = [ "default" "std" ]; + }; + "arrayref" = rec { + crateName = "arrayref"; + version = "0.3.7"; + edition = "2015"; + sha256 = "0ia5ndyxqkzdymqr4ls53jdmajf09adjimg5kvw65kkprg930jbb"; + authors = [ + "David Roundy <roundyd@physics.oregonstate.edu>" + ]; + + }; + "arrayvec" = rec { + crateName = "arrayvec"; + version = "0.7.2"; + edition = "2018"; + sha256 = "1mjl8jjqxpl0x7sm9cij61cppi7yi38cdrd1l8zjw7h7qxk2v9cd"; + authors = [ + "bluss" + ]; + features = { + "default" = [ "std" ]; + "serde" = [ "dep:serde" ]; + }; + }; + "async-stream" = rec { + crateName = "async-stream"; + version = "0.3.5"; + edition = "2018"; + sha256 = "0l8sjq1rylkb1ak0pdyjn83b3k6x36j22myngl4sqqgg7whdsmnd"; + authors = [ + "Carl Lerche <me@carllerche.com>" + ]; + dependencies = [ + { + name = "async-stream-impl"; + packageId = "async-stream-impl"; + } + { + name = "futures-core"; + packageId = "futures-core"; + } + { + name = "pin-project-lite"; + packageId = "pin-project-lite"; + } + ]; + + }; + "async-stream-impl" = rec { + crateName = "async-stream-impl"; + version = "0.3.5"; + edition = "2018"; + sha256 = "14q179j4y8p2z1d0ic6aqgy9fhwz8p9cai1ia8kpw4bw7q12mrhn"; + procMacro = true; + authors = [ + "Carl Lerche <me@carllerche.com>" + ]; + dependencies = [ + { + name = "proc-macro2"; + packageId = "proc-macro2 1.0.56"; + } + { + name = "quote"; + packageId = "quote 1.0.26"; + } + { + name = "syn"; + packageId = "syn 2.0.15"; + features = [ "full" "visit-mut" ]; + } + ]; + + }; + "async-trait" = rec { + crateName = "async-trait"; + version = "0.1.68"; + edition = "2018"; + sha256 = "0hp8ysdjr8c43avm7bkj73cd22ra3dpzag82bjyyj6qn5a7xvk5r"; + procMacro = true; + authors = [ + "David Tolnay <dtolnay@gmail.com>" + ]; + dependencies = [ + { + name = "proc-macro2"; + packageId = "proc-macro2 1.0.56"; + } + { + name = "quote"; + packageId = "quote 1.0.26"; + } + { + name = "syn"; + packageId = "syn 2.0.15"; + features = [ "full" "visit-mut" ]; + } + ]; + + }; + "atty" = rec { + crateName = "atty"; + version = "0.2.14"; + edition = "2015"; + sha256 = 
"1s7yslcs6a28c5vz7jwj63lkfgyx8mx99fdirlhi9lbhhzhrpcyr"; + authors = [ + "softprops <d.tangren@gmail.com>" + ]; + dependencies = [ + { + name = "hermit-abi"; + packageId = "hermit-abi 0.1.19"; + target = { target, features }: ("hermit" == target."os"); + } + { + name = "libc"; + packageId = "libc"; + usesDefaultFeatures = false; + target = { target, features }: (target."unix" or false); + } + { + name = "winapi"; + packageId = "winapi"; + target = { target, features }: (target."windows" or false); + features = [ "consoleapi" "processenv" "minwinbase" "minwindef" "winbase" ]; + } + ]; + + }; + "autocfg" = rec { + crateName = "autocfg"; + version = "1.1.0"; + edition = "2015"; + sha256 = "1ylp3cb47ylzabimazvbz9ms6ap784zhb6syaz6c1jqpmcmq0s6l"; + authors = [ + "Josh Stone <cuviper@gmail.com>" + ]; + + }; + "axum" = rec { + crateName = "axum"; + version = "0.6.18"; + edition = "2021"; + sha256 = "0ffzv20n4f68qa7d9cp4am0l7np0wxp5ixkv3lf3694i4mwmj5zq"; + dependencies = [ + { + name = "async-trait"; + packageId = "async-trait"; + } + { + name = "axum-core"; + packageId = "axum-core"; + } + { + name = "bitflags"; + packageId = "bitflags"; + } + { + name = "bytes"; + packageId = "bytes"; + } + { + name = "futures-util"; + packageId = "futures-util"; + usesDefaultFeatures = false; + features = [ "alloc" ]; + } + { + name = "http"; + packageId = "http"; + } + { + name = "http-body"; + packageId = "http-body"; + } + { + name = "hyper"; + packageId = "hyper"; + features = [ "stream" ]; + } + { + name = "itoa"; + packageId = "itoa"; + } + { + name = "matchit"; + packageId = "matchit"; + } + { + name = "memchr"; + packageId = "memchr"; + } + { + name = "mime"; + packageId = "mime"; + } + { + name = "percent-encoding"; + packageId = "percent-encoding"; + } + { + name = "pin-project-lite"; + packageId = "pin-project-lite"; + } + { + name = "serde"; + packageId = "serde"; + } + { + name = "sync_wrapper"; + packageId = "sync_wrapper"; + } + { + name = "tower"; + packageId = "tower"; + usesDefaultFeatures = false; + features = [ "util" ]; + } + { + name = "tower-layer"; + packageId = "tower-layer"; + } + { + name = "tower-service"; + packageId = "tower-service"; + } + ]; + buildDependencies = [ + { + name = "rustversion"; + packageId = "rustversion"; + } + ]; + devDependencies = [ + { + name = "rustversion"; + packageId = "rustversion"; + } + { + name = "serde"; + packageId = "serde"; + features = [ "derive" ]; + } + { + name = "tower"; + packageId = "tower"; + rename = "tower"; + features = [ "util" "timeout" "limit" "load-shed" "steer" "filter" ]; + } + ]; + features = { + "__private_docs" = [ "tower/full" "dep:tower-http" ]; + "default" = [ "form" "http1" "json" "matched-path" "original-uri" "query" "tokio" "tower-log" ]; + "form" = [ "dep:serde_urlencoded" ]; + "headers" = [ "dep:headers" ]; + "http1" = [ "hyper/http1" ]; + "http2" = [ "hyper/http2" ]; + "json" = [ "dep:serde_json" "dep:serde_path_to_error" ]; + "macros" = [ "dep:axum-macros" ]; + "multipart" = [ "dep:multer" ]; + "query" = [ "dep:serde_urlencoded" ]; + "tokio" = [ "dep:tokio" "hyper/server" "hyper/tcp" "hyper/runtime" "tower/make" ]; + "tower-log" = [ "tower/log" ]; + "tracing" = [ "dep:tracing" "axum-core/tracing" ]; + "ws" = [ "tokio" "dep:tokio-tungstenite" "dep:sha1" "dep:base64" ]; + }; + }; + "axum-core" = rec { + crateName = "axum-core"; + version = "0.3.4"; + edition = "2021"; + sha256 = "0b1d9nkqb8znaba4qqzxzc968qwj4ybn4vgpyz9lz4a7l9vsb7vm"; + dependencies = [ + { + name = "async-trait"; + packageId = "async-trait"; + } + { + name 
= "bytes"; + packageId = "bytes"; + } + { + name = "futures-util"; + packageId = "futures-util"; + usesDefaultFeatures = false; + features = [ "alloc" ]; + } + { + name = "http"; + packageId = "http"; + } + { + name = "http-body"; + packageId = "http-body"; + } + { + name = "mime"; + packageId = "mime"; + } + { + name = "tower-layer"; + packageId = "tower-layer"; + } + { + name = "tower-service"; + packageId = "tower-service"; + } + ]; + buildDependencies = [ + { + name = "rustversion"; + packageId = "rustversion"; + } + ]; + devDependencies = [ + { + name = "futures-util"; + packageId = "futures-util"; + usesDefaultFeatures = false; + features = [ "alloc" ]; + } + ]; + features = { + "__private_docs" = [ "dep:tower-http" ]; + "tracing" = [ "dep:tracing" ]; + }; + }; + "base64 0.10.1" = rec { + crateName = "base64"; + version = "0.10.1"; + edition = "2015"; + sha256 = "13k6bvd3n6dm7jqn9x918w65dd9xhx454bqphbnv0bkd6n9dj98b"; + authors = [ + "Alice Maz <alice@alicemaz.com>" + "Marshall Pierce <marshall@mpierce.org>" + ]; + dependencies = [ + { + name = "byteorder"; + packageId = "byteorder"; + } + ]; + + }; + "base64 0.13.1" = rec { + crateName = "base64"; + version = "0.13.1"; + edition = "2018"; + sha256 = "1s494mqmzjb766fy1kqlccgfg2sdcjb6hzbvzqv2jw65fdi5h6wy"; + authors = [ + "Alice Maz <alice@alicemaz.com>" + "Marshall Pierce <marshall@mpierce.org>" + ]; + features = { + "default" = [ "std" ]; + }; + resolvedDefaultFeatures = [ "default" "std" ]; + }; + "bit-set" = rec { + crateName = "bit-set"; + version = "0.5.3"; + edition = "2015"; + sha256 = "1wcm9vxi00ma4rcxkl3pzzjli6ihrpn9cfdi0c5b4cvga2mxs007"; + authors = [ + "Alexis Beingessner <a.beingessner@gmail.com>" + ]; + dependencies = [ + { + name = "bit-vec"; + packageId = "bit-vec"; + usesDefaultFeatures = false; + } + ]; + features = { + "default" = [ "std" ]; + "std" = [ "bit-vec/std" ]; + }; + resolvedDefaultFeatures = [ "default" "std" ]; + }; + "bit-vec" = rec { + crateName = "bit-vec"; + version = "0.6.3"; + edition = "2015"; + sha256 = "1ywqjnv60cdh1slhz67psnp422md6jdliji6alq0gmly2xm9p7rl"; + authors = [ + "Alexis Beingessner <a.beingessner@gmail.com>" + ]; + features = { + "default" = [ "std" ]; + "serde" = [ "dep:serde" ]; + "serde_no_std" = [ "serde/alloc" ]; + "serde_std" = [ "std" "serde/std" ]; + }; + resolvedDefaultFeatures = [ "std" ]; + }; + "bitflags" = rec { + crateName = "bitflags"; + version = "1.3.2"; + edition = "2018"; + sha256 = "12ki6w8gn1ldq7yz9y680llwk5gmrhrzszaa17g1sbrw2r2qvwxy"; + authors = [ + "The Rust Project Developers" + ]; + features = { + "compiler_builtins" = [ "dep:compiler_builtins" ]; + "core" = [ "dep:core" ]; + "rustc-dep-of-std" = [ "core" "compiler_builtins" ]; + }; + resolvedDefaultFeatures = [ "default" ]; + }; + "bitmaps" = rec { + crateName = "bitmaps"; + version = "3.2.0"; + edition = "2021"; + sha256 = "00ql08pm4l9hizkldyy54v0pk96g7zg8x6i72c2vkcq0iawl4dkh"; + authors = [ + "Bodil Stokke <bodil@bodil.org>" + ]; + features = { + "default" = [ "std" ]; + }; + resolvedDefaultFeatures = [ "default" "std" ]; + }; + "blake3" = rec { + crateName = "blake3"; + version = "1.3.3"; + edition = "2018"; + sha256 = "1vyckzpfq46dkxyvy12gkx4nddr5g93alh38i1ka8i4mm1l29bj2"; + authors = [ + "Jack O'Connor <oconnor663@gmail.com>" + "Samuel Neves" + ]; + dependencies = [ + { + name = "arrayref"; + packageId = "arrayref"; + } + { + name = "arrayvec"; + packageId = "arrayvec"; + usesDefaultFeatures = false; + } + { + name = "cfg-if"; + packageId = "cfg-if"; + } + { + name = "constant_time_eq"; + packageId = 
"constant_time_eq"; + } + { + name = "digest"; + packageId = "digest 0.10.6"; + optional = true; + features = [ "mac" ]; + } + { + name = "rayon"; + packageId = "rayon"; + optional = true; + } + ]; + buildDependencies = [ + { + name = "cc"; + packageId = "cc"; + } + ]; + features = { + "default" = [ "std" ]; + "digest" = [ "dep:digest" ]; + "rayon" = [ "dep:rayon" ]; + "std" = [ "digest/std" ]; + "traits-preview" = [ "digest" ]; + }; + resolvedDefaultFeatures = [ "default" "digest" "rayon" "std" ]; + }; + "block-buffer 0.10.4" = rec { + crateName = "block-buffer"; + version = "0.10.4"; + edition = "2018"; + sha256 = "0w9sa2ypmrsqqvc20nhwr75wbb5cjr4kkyhpjm1z1lv2kdicfy1h"; + authors = [ + "RustCrypto Developers" + ]; + dependencies = [ + { + name = "generic-array"; + packageId = "generic-array 0.14.7"; + } + ]; + + }; + "block-buffer 0.7.3" = rec { + crateName = "block-buffer"; + version = "0.7.3"; + edition = "2015"; + sha256 = "12v8wizynqin0hqf140kmp9s38q223mp1b0hkqk8j5pk8720v560"; + authors = [ + "RustCrypto Developers" + ]; + dependencies = [ + { + name = "block-padding"; + packageId = "block-padding"; + } + { + name = "byte-tools"; + packageId = "byte-tools"; + } + { + name = "byteorder"; + packageId = "byteorder"; + usesDefaultFeatures = false; + } + { + name = "generic-array"; + packageId = "generic-array 0.12.4"; + } + ]; + + }; + "block-padding" = rec { + crateName = "block-padding"; + version = "0.1.5"; + edition = "2015"; + sha256 = "1xbkmysiz23vimd17rnsjpw9bgjxipwfslwyygqlkx4in3dxwygs"; + authors = [ + "RustCrypto Developers" + ]; + dependencies = [ + { + name = "byte-tools"; + packageId = "byte-tools"; + } + ]; + + }; + "bumpalo" = rec { + crateName = "bumpalo"; + version = "3.12.1"; + edition = "2021"; + sha256 = "1j7hjszs00lgl1ddwg4369f4jh87cbpf1m3xzczz751n0scy274v"; + authors = [ + "Nick Fitzgerald <fitzgen@gmail.com>" + ]; + features = { }; + resolvedDefaultFeatures = [ "default" ]; + }; + "byte-tools" = rec { + crateName = "byte-tools"; + version = "0.3.1"; + edition = "2015"; + sha256 = "1mqi29wsm8njpl51pfwr31wmpzs5ahlcb40wsjyd92l90ixcmdg3"; + authors = [ + "RustCrypto Developers" + ]; + + }; + "byteorder" = rec { + crateName = "byteorder"; + version = "1.4.3"; + edition = "2018"; + sha256 = "0456lv9xi1a5bcm32arknf33ikv76p3fr9yzki4lb2897p2qkh8l"; + authors = [ + "Andrew Gallant <jamslam@gmail.com>" + ]; + features = { + "default" = [ "std" ]; + }; + resolvedDefaultFeatures = [ "default" "std" ]; + }; + "bytes" = rec { + crateName = "bytes"; + version = "1.4.0"; + edition = "2018"; + sha256 = "1gkh3fk4fm9xv5znlib723h5md5sxsvbd5113sbxff6g1lmgvcl9"; + authors = [ + "Carl Lerche <me@carllerche.com>" + "Sean McArthur <sean@seanmonstar.com>" + ]; + features = { + "default" = [ "std" ]; + "serde" = [ "dep:serde" ]; + }; + resolvedDefaultFeatures = [ "default" "std" ]; + }; + "cast" = rec { + crateName = "cast"; + version = "0.3.0"; + edition = "2018"; + sha256 = "1dbyngbyz2qkk0jn2sxil8vrz3rnpcj142y184p9l4nbl9radcip"; + authors = [ + "Jorge Aparicio <jorge@japaric.io>" + ]; + features = { }; + }; + "cc" = rec { + crateName = "cc"; + version = "1.0.79"; + edition = "2018"; + crateBin = [ ]; + sha256 = "07x93b8zbf3xc2dggdd460xlk1wg8lxm6yflwddxj8b15030klsh"; + authors = [ + "Alex Crichton <alex@alexcrichton.com>" + ]; + dependencies = [ + { + name = "jobserver"; + packageId = "jobserver"; + optional = true; + } + ]; + features = { + "jobserver" = [ "dep:jobserver" ]; + "parallel" = [ "jobserver" ]; + }; + resolvedDefaultFeatures = [ "jobserver" "parallel" ]; + }; + "cfg-if" = rec { + 
crateName = "cfg-if"; + version = "1.0.0"; + edition = "2018"; + sha256 = "1za0vb97n4brpzpv8lsbnzmq5r8f2b0cpqqr0sy8h5bn751xxwds"; + authors = [ + "Alex Crichton <alex@alexcrichton.com>" + ]; + features = { + "compiler_builtins" = [ "dep:compiler_builtins" ]; + "core" = [ "dep:core" ]; + "rustc-dep-of-std" = [ "core" "compiler_builtins" ]; + }; + }; + "ciborium" = rec { + crateName = "ciborium"; + version = "0.2.0"; + edition = "2021"; + sha256 = "13vqkm88kaq8nvxhaj6qsl0gsc16rqsin014fx5902y6iib3ghdh"; + authors = [ + "Nathaniel McCallum <npmccallum@profian.com>" + ]; + dependencies = [ + { + name = "ciborium-io"; + packageId = "ciborium-io"; + features = [ "alloc" ]; + } + { + name = "ciborium-ll"; + packageId = "ciborium-ll"; + } + { + name = "serde"; + packageId = "serde"; + usesDefaultFeatures = false; + features = [ "alloc" "derive" ]; + } + ]; + features = { + "default" = [ "std" ]; + "std" = [ "ciborium-io/std" "serde/std" ]; + }; + resolvedDefaultFeatures = [ "default" "std" ]; + }; + "ciborium-io" = rec { + crateName = "ciborium-io"; + version = "0.2.0"; + edition = "2021"; + sha256 = "0sdkk7l7pqi2nsbm9c6g8im1gb1qdd83l25ja9xwhg07mx9yfv9l"; + authors = [ + "Nathaniel McCallum <npmccallum@profian.com>" + ]; + features = { + "std" = [ "alloc" ]; + }; + resolvedDefaultFeatures = [ "alloc" "std" ]; + }; + "ciborium-ll" = rec { + crateName = "ciborium-ll"; + version = "0.2.0"; + edition = "2021"; + sha256 = "06ygqh33k3hp9r9mma43gf189b6cyq62clk65f4w1q54nni30c11"; + authors = [ + "Nathaniel McCallum <npmccallum@profian.com>" + ]; + dependencies = [ + { + name = "ciborium-io"; + packageId = "ciborium-io"; + } + { + name = "half"; + packageId = "half"; + } + ]; + features = { + "std" = [ "alloc" ]; + }; + }; + "clap 3.2.25" = rec { + crateName = "clap"; + version = "3.2.25"; + edition = "2021"; + crateBin = [ ]; + sha256 = "08vi402vfqmfj9f07c4gl6082qxgf4c9x98pbndcnwbgaszq38af"; + dependencies = [ + { + name = "bitflags"; + packageId = "bitflags"; + } + { + name = "clap_lex"; + packageId = "clap_lex 0.2.4"; + } + { + name = "indexmap"; + packageId = "indexmap"; + } + { + name = "textwrap"; + packageId = "textwrap"; + usesDefaultFeatures = false; + } + ]; + features = { + "atty" = [ "dep:atty" ]; + "backtrace" = [ "dep:backtrace" ]; + "cargo" = [ "once_cell" ]; + "clap_derive" = [ "dep:clap_derive" ]; + "color" = [ "atty" "termcolor" ]; + "debug" = [ "clap_derive/debug" "backtrace" ]; + "default" = [ "std" "color" "suggestions" ]; + "deprecated" = [ "clap_derive/deprecated" ]; + "derive" = [ "clap_derive" "once_cell" ]; + "once_cell" = [ "dep:once_cell" ]; + "regex" = [ "dep:regex" ]; + "std" = [ "indexmap/std" ]; + "strsim" = [ "dep:strsim" ]; + "suggestions" = [ "strsim" ]; + "termcolor" = [ "dep:termcolor" ]; + "terminal_size" = [ "dep:terminal_size" ]; + "unicase" = [ "dep:unicase" ]; + "unicode" = [ "textwrap/unicode-width" "unicase" ]; + "unstable-doc" = [ "derive" "cargo" "wrap_help" "yaml" "env" "unicode" "regex" "unstable-replace" "unstable-grouped" ]; + "unstable-v4" = [ "clap_derive/unstable-v4" "deprecated" ]; + "wrap_help" = [ "terminal_size" "textwrap/terminal_size" ]; + "yaml" = [ "yaml-rust" ]; + "yaml-rust" = [ "dep:yaml-rust" ]; + }; + resolvedDefaultFeatures = [ "std" ]; + }; + "clap 4.2.7" = rec { + crateName = "clap"; + version = "4.2.7"; + edition = "2021"; + crateBin = [ ]; + sha256 = "0f69iwzh30wbf01ka1k4fa1mxzh22b4iwqs3i6bd49dly6dizlil"; + dependencies = [ + { + name = "clap_builder"; + packageId = "clap_builder"; + usesDefaultFeatures = false; + } + { + name = 
"clap_derive"; + packageId = "clap_derive"; + optional = true; + } + { + name = "once_cell"; + packageId = "once_cell"; + optional = true; + } + ]; + features = { + "cargo" = [ "clap_builder/cargo" ]; + "color" = [ "clap_builder/color" ]; + "debug" = [ "clap_builder/debug" "clap_derive?/debug" ]; + "default" = [ "std" "color" "help" "usage" "error-context" "suggestions" ]; + "deprecated" = [ "clap_builder/deprecated" "clap_derive?/deprecated" ]; + "derive" = [ "dep:clap_derive" "dep:once_cell" ]; + "env" = [ "clap_builder/env" ]; + "error-context" = [ "clap_builder/error-context" ]; + "help" = [ "clap_builder/help" ]; + "std" = [ "clap_builder/std" ]; + "string" = [ "clap_builder/string" ]; + "suggestions" = [ "clap_builder/suggestions" ]; + "unicode" = [ "clap_builder/unicode" ]; + "unstable-doc" = [ "clap_builder/unstable-doc" "derive" ]; + "unstable-styles" = [ "clap_builder/unstable-styles" ]; + "unstable-v5" = [ "clap_builder/unstable-v5" "clap_derive?/unstable-v5" "deprecated" ]; + "usage" = [ "clap_builder/usage" ]; + "wrap_help" = [ "clap_builder/wrap_help" ]; + }; + resolvedDefaultFeatures = [ "color" "default" "derive" "env" "error-context" "help" "std" "suggestions" "usage" ]; + }; + "clap_builder" = rec { + crateName = "clap_builder"; + version = "4.2.7"; + edition = "2021"; + sha256 = "1gbhk6r14gr0yxc8qs4flbvn5j1302ikk522ys7263snzdwqqk4i"; + dependencies = [ + { + name = "anstream"; + packageId = "anstream"; + optional = true; + } + { + name = "anstyle"; + packageId = "anstyle"; + } + { + name = "bitflags"; + packageId = "bitflags"; + } + { + name = "clap_lex"; + packageId = "clap_lex 0.4.1"; + } + { + name = "strsim"; + packageId = "strsim"; + optional = true; + } + ]; + features = { + "cargo" = [ "dep:once_cell" ]; + "color" = [ "dep:anstream" ]; + "debug" = [ "dep:backtrace" ]; + "default" = [ "std" "color" "help" "usage" "error-context" "suggestions" ]; + "std" = [ "anstyle/std" ]; + "suggestions" = [ "dep:strsim" "error-context" ]; + "unicode" = [ "dep:unicode-width" "dep:unicase" ]; + "unstable-doc" = [ "cargo" "wrap_help" "env" "unicode" "string" "unstable-styles" ]; + "unstable-styles" = [ "color" ]; + "unstable-v5" = [ "deprecated" ]; + "wrap_help" = [ "help" "dep:terminal_size" ]; + }; + resolvedDefaultFeatures = [ "color" "env" "error-context" "help" "std" "suggestions" "usage" ]; + }; + "clap_derive" = rec { + crateName = "clap_derive"; + version = "4.2.0"; + edition = "2021"; + sha256 = "1i65yn9n1hydvwrimqp9civ67h1iwd9v1y4yi6z7vf6nav6l95iz"; + procMacro = true; + dependencies = [ + { + name = "heck"; + packageId = "heck"; + } + { + name = "proc-macro2"; + packageId = "proc-macro2 1.0.56"; + } + { + name = "quote"; + packageId = "quote 1.0.26"; + } + { + name = "syn"; + packageId = "syn 2.0.15"; + features = [ "full" ]; + } + ]; + features = { + "raw-deprecated" = [ "deprecated" ]; + "unstable-v5" = [ "deprecated" ]; + }; + resolvedDefaultFeatures = [ "default" ]; + }; + "clap_lex 0.2.4" = rec { + crateName = "clap_lex"; + version = "0.2.4"; + edition = "2021"; + sha256 = "1ib1a9v55ybnaws11l63az0jgz5xiy24jkdgsmyl7grcm3sz4l18"; + dependencies = [ + { + name = "os_str_bytes"; + packageId = "os_str_bytes"; + usesDefaultFeatures = false; + features = [ "raw_os_str" ]; + } + ]; + + }; + "clap_lex 0.4.1" = rec { + crateName = "clap_lex"; + version = "0.4.1"; + edition = "2021"; + sha256 = "18dyxyc0g5xrazj8k6mdjd8v3fvka8z3b9k8yl13avlczskdabca"; + + }; + "clipboard-win" = rec { + crateName = "clipboard-win"; + version = "4.5.0"; + edition = "2018"; + sha256 = 
"0qh3rypkf1lazniq4nr04hxsck0d55rigb5sjvpvgnap4dyc54bi"; + authors = [ + "Douman <douman@gmx.se>" + ]; + dependencies = [ + { + name = "error-code"; + packageId = "error-code"; + target = { target, features }: (target."windows" or false); + } + { + name = "str-buf"; + packageId = "str-buf"; + target = { target, features }: (target."windows" or false); + } + { + name = "winapi"; + packageId = "winapi"; + usesDefaultFeatures = false; + target = { target, features }: (target."windows" or false); + features = [ "basetsd" "shellapi" "winbase" "winuser" "winerror" "stringapiset" "errhandlingapi" "synchapi" ]; + } + ]; + features = { + "std" = [ "error-code/std" ]; + }; + }; + "codemap" = rec { + crateName = "codemap"; + version = "0.1.3"; + edition = "2015"; + sha256 = "091azkslwkcijj3lp9ymb084y9a0wm4fkil7m613ja68r2snkrxr"; + authors = [ + "Kevin Mehall <km@kevinmehall.net>" + ]; + + }; + "codemap-diagnostic" = rec { + crateName = "codemap-diagnostic"; + version = "0.1.1"; + edition = "2015"; + sha256 = "0a2hpb57f97816fjz89qrsz1b5r4j2s2a1p9z58ffx17iszfd82b"; + authors = [ + "Kevin Mehall <km@kevinmehall.net>" + "The Rust Project Developers" + ]; + dependencies = [ + { + name = "atty"; + packageId = "atty"; + } + { + name = "codemap"; + packageId = "codemap"; + } + { + name = "termcolor"; + packageId = "termcolor"; + } + ]; + + }; + "colorchoice" = rec { + crateName = "colorchoice"; + version = "1.0.0"; + edition = "2021"; + sha256 = "1ix7w85kwvyybwi2jdkl3yva2r2bvdcc3ka2grjfzfgrapqimgxc"; + + }; + "constant_time_eq" = rec { + crateName = "constant_time_eq"; + version = "0.2.5"; + edition = "2021"; + sha256 = "0sy7bs12dfa2d5hw7759b0mvjqcs85giajg4qyg39xq8a1s8wh8k"; + authors = [ + "Cesar Eduardo Barros <cesarb@cesarb.eti.br>" + ]; + + }; + "count-write" = rec { + crateName = "count-write"; + version = "0.1.0"; + edition = "2018"; + sha256 = "11bswmgr81s3jagdci1pr6qh9vnz9zsbbf2dqpi260daa2mhgmff"; + authors = [ + "SOFe <sofe2038@gmail.com>" + ]; + features = { + "futures" = [ "futures-io-preview" ]; + "futures-io-preview" = [ "dep:futures-io-preview" ]; + "tokio" = [ "tokio-io" ]; + "tokio-io" = [ "dep:tokio-io" ]; + }; + }; + "countme" = rec { + crateName = "countme"; + version = "3.0.1"; + edition = "2018"; + sha256 = "0dn62hhvgmwyxslh14r4nlbvz8h50cp5mnn1qhqsw63vs7yva13p"; + authors = [ + "Aleksey Kladov <aleksey.kladov@gmail.com>" + ]; + features = { + "dashmap" = [ "dep:dashmap" ]; + "enable" = [ "dashmap" "once_cell" "rustc-hash" ]; + "once_cell" = [ "dep:once_cell" ]; + "print_at_exit" = [ "enable" ]; + "rustc-hash" = [ "dep:rustc-hash" ]; + }; + }; + "cpufeatures" = rec { + crateName = "cpufeatures"; + version = "0.2.7"; + edition = "2018"; + sha256 = "0n7y7ls0g1svrjr6ymjv338q8ajc91sv2amdpgn7pi0j42m1wk1y"; + authors = [ + "RustCrypto Developers" + ]; + dependencies = [ + { + name = "libc"; + packageId = "libc"; + target = { target, features }: (pkgs.rust.lib.toRustTarget stdenv.hostPlatform == "aarch64-linux-android"); + } + { + name = "libc"; + packageId = "libc"; + target = { target, features }: (("aarch64" == target."arch") && ("linux" == target."os")); + } + { + name = "libc"; + packageId = "libc"; + target = { target, features }: (("aarch64" == target."arch") && ("apple" == target."vendor")); + } + ]; + + }; + "crc32fast" = rec { + crateName = "crc32fast"; + version = "1.3.2"; + edition = "2015"; + sha256 = "03c8f29yx293yf43xar946xbls1g60c207m9drf8ilqhr25vsh5m"; + authors = [ + "Sam Rijs <srijs@airpost.net>" + "Alex Crichton <alex@alexcrichton.com>" + ]; + dependencies = [ + { + name = 
"cfg-if"; + packageId = "cfg-if"; + } + ]; + features = { + "default" = [ "std" ]; + }; + resolvedDefaultFeatures = [ "default" "std" ]; + }; + "criterion" = rec { + crateName = "criterion"; + version = "0.4.0"; + edition = "2018"; + sha256 = "1jsl4r0yc3fpkyjbi8aa1jrm69apqq9rxwnjnd9brqmaq44nxiz7"; + authors = [ + "Jorge Aparicio <japaricious@gmail.com>" + "Brook Heisler <brookheisler@gmail.com>" + ]; + dependencies = [ + { + name = "anes"; + packageId = "anes"; + } + { + name = "atty"; + packageId = "atty"; + } + { + name = "cast"; + packageId = "cast"; + } + { + name = "ciborium"; + packageId = "ciborium"; + } + { + name = "clap"; + packageId = "clap 3.2.25"; + usesDefaultFeatures = false; + features = [ "std" ]; + } + { + name = "criterion-plot"; + packageId = "criterion-plot"; + } + { + name = "itertools"; + packageId = "itertools"; + } + { + name = "lazy_static"; + packageId = "lazy_static"; + } + { + name = "num-traits"; + packageId = "num-traits"; + usesDefaultFeatures = false; + features = [ "std" ]; + } + { + name = "oorandom"; + packageId = "oorandom"; + } + { + name = "plotters"; + packageId = "plotters"; + optional = true; + usesDefaultFeatures = false; + features = [ "svg_backend" "area_series" "line_series" ]; + } + { + name = "rayon"; + packageId = "rayon"; + optional = true; + } + { + name = "regex"; + packageId = "regex"; + usesDefaultFeatures = false; + features = [ "std" ]; + } + { + name = "serde"; + packageId = "serde"; + } + { + name = "serde_derive"; + packageId = "serde_derive"; + } + { + name = "serde_json"; + packageId = "serde_json"; + } + { + name = "tinytemplate"; + packageId = "tinytemplate"; + } + { + name = "walkdir"; + packageId = "walkdir"; + } + ]; + features = { + "async" = [ "futures" ]; + "async-std" = [ "dep:async-std" ]; + "async_futures" = [ "futures/executor" "async" ]; + "async_smol" = [ "smol" "async" ]; + "async_std" = [ "async-std" "async" ]; + "async_tokio" = [ "tokio" "async" ]; + "csv" = [ "dep:csv" ]; + "csv_output" = [ "csv" ]; + "default" = [ "rayon" "plotters" "cargo_bench_support" ]; + "futures" = [ "dep:futures" ]; + "plotters" = [ "dep:plotters" ]; + "rayon" = [ "dep:rayon" ]; + "smol" = [ "dep:smol" ]; + "stable" = [ "csv_output" "html_reports" "async_futures" "async_smol" "async_tokio" "async_std" ]; + "tokio" = [ "dep:tokio" ]; + }; + resolvedDefaultFeatures = [ "cargo_bench_support" "default" "plotters" "rayon" ]; + }; + "criterion-plot" = rec { + crateName = "criterion-plot"; + version = "0.5.0"; + edition = "2018"; + sha256 = "1c866xkjqqhzg4cjvg01f8w6xc1j3j7s58rdksl52skq89iq4l3b"; + authors = [ + "Jorge Aparicio <japaricious@gmail.com>" + "Brook Heisler <brookheisler@gmail.com>" + ]; + dependencies = [ + { + name = "cast"; + packageId = "cast"; + } + { + name = "itertools"; + packageId = "itertools"; + } + ]; + + }; + "crossbeam-channel" = rec { + crateName = "crossbeam-channel"; + version = "0.5.8"; + edition = "2018"; + sha256 = "004jz4wxp9k26z657i7rsh9s7586dklx2c5aqf1n3w1dgzvjng53"; + dependencies = [ + { + name = "cfg-if"; + packageId = "cfg-if"; + } + { + name = "crossbeam-utils"; + packageId = "crossbeam-utils"; + optional = true; + usesDefaultFeatures = false; + } + ]; + features = { + "crossbeam-utils" = [ "dep:crossbeam-utils" ]; + "default" = [ "std" ]; + "std" = [ "crossbeam-utils/std" ]; + }; + resolvedDefaultFeatures = [ "crossbeam-utils" "default" "std" ]; + }; + "crossbeam-deque" = rec { + crateName = "crossbeam-deque"; + version = "0.8.3"; + edition = "2018"; + sha256 = 
"1vqczbcild7nczh5z116w8w46z991kpjyw7qxkf24c14apwdcvyf"; + dependencies = [ + { + name = "cfg-if"; + packageId = "cfg-if"; + } + { + name = "crossbeam-epoch"; + packageId = "crossbeam-epoch"; + optional = true; + usesDefaultFeatures = false; + } + { + name = "crossbeam-utils"; + packageId = "crossbeam-utils"; + optional = true; + usesDefaultFeatures = false; + } + ]; + features = { + "crossbeam-epoch" = [ "dep:crossbeam-epoch" ]; + "crossbeam-utils" = [ "dep:crossbeam-utils" ]; + "default" = [ "std" ]; + "std" = [ "crossbeam-epoch/std" "crossbeam-utils/std" ]; + }; + resolvedDefaultFeatures = [ "crossbeam-epoch" "crossbeam-utils" "default" "std" ]; + }; + "crossbeam-epoch" = rec { + crateName = "crossbeam-epoch"; + version = "0.9.14"; + edition = "2018"; + sha256 = "15anryfq33mhxnlw95ajixnzznxays3gpvaas6lraci7hlzmzga6"; + dependencies = [ + { + name = "cfg-if"; + packageId = "cfg-if"; + } + { + name = "crossbeam-utils"; + packageId = "crossbeam-utils"; + usesDefaultFeatures = false; + } + { + name = "memoffset"; + packageId = "memoffset"; + } + { + name = "scopeguard"; + packageId = "scopeguard"; + usesDefaultFeatures = false; + } + ]; + buildDependencies = [ + { + name = "autocfg"; + packageId = "autocfg"; + } + ]; + features = { + "default" = [ "std" ]; + "loom" = [ "loom-crate" "crossbeam-utils/loom" ]; + "loom-crate" = [ "dep:loom-crate" ]; + "nightly" = [ "crossbeam-utils/nightly" ]; + "std" = [ "alloc" "crossbeam-utils/std" ]; + }; + resolvedDefaultFeatures = [ "alloc" "default" "std" ]; + }; + "crossbeam-utils" = rec { + crateName = "crossbeam-utils"; + version = "0.8.15"; + edition = "2018"; + sha256 = "0jwq8srmjcwvq9q883k9zyb26qqznaj4jjqdxmvw7xcmrkc3q1iw"; + dependencies = [ + { + name = "cfg-if"; + packageId = "cfg-if"; + } + ]; + features = { + "default" = [ "std" ]; + "loom" = [ "dep:loom" ]; + }; + resolvedDefaultFeatures = [ "default" "std" ]; + }; + "crypto-common" = rec { + crateName = "crypto-common"; + version = "0.1.6"; + edition = "2018"; + sha256 = "1cvby95a6xg7kxdz5ln3rl9xh66nz66w46mm3g56ri1z5x815yqv"; + authors = [ + "RustCrypto Developers" + ]; + dependencies = [ + { + name = "generic-array"; + packageId = "generic-array 0.14.7"; + features = [ "more_lengths" ]; + } + { + name = "typenum"; + packageId = "typenum"; + } + ]; + features = { + "getrandom" = [ "rand_core/getrandom" ]; + "rand_core" = [ "dep:rand_core" ]; + }; + resolvedDefaultFeatures = [ "std" ]; + }; + "ctor" = rec { + crateName = "ctor"; + version = "0.1.26"; + edition = "2018"; + sha256 = "15m0wqhv12p25xkxz5dxvg23r7a6bkh7p8zi1cdhgswjhdl028vd"; + procMacro = true; + authors = [ + "Matt Mastracci <matthew@mastracci.com>" + ]; + dependencies = [ + { + name = "quote"; + packageId = "quote 1.0.26"; + } + { + name = "syn"; + packageId = "syn 1.0.109"; + usesDefaultFeatures = false; + features = [ "full" "parsing" "printing" "proc-macro" ]; + } + ]; + + }; + "data-encoding" = rec { + crateName = "data-encoding"; + version = "2.3.3"; + edition = "2018"; + sha256 = "1yq8jnivxsjzl3mjbjdjg5kfvd17wawbmg1jvsfw6cqmn1n6dn13"; + authors = [ + "Julien Cretin <git@ia0.eu>" + ]; + features = { + "default" = [ "std" ]; + "std" = [ "alloc" ]; + }; + resolvedDefaultFeatures = [ "alloc" "default" "std" ]; + }; + "diff" = rec { + crateName = "diff"; + version = "0.1.13"; + edition = "2015"; + sha256 = "1j0nzjxci2zqx63hdcihkp0a4dkdmzxd7my4m7zk6cjyfy34j9an"; + authors = [ + "Utkarsh Kukreti <utkarshkukreti@gmail.com>" + ]; + + }; + "digest 0.10.6" = rec { + crateName = "digest"; + version = "0.10.6"; + edition = "2018"; + 
sha256 = "0vz74785s96g727vg37iwkjvbkcfzp093j49ihhyf8sh9s7kfs41"; + authors = [ + "RustCrypto Developers" + ]; + dependencies = [ + { + name = "block-buffer"; + packageId = "block-buffer 0.10.4"; + optional = true; + } + { + name = "crypto-common"; + packageId = "crypto-common"; + } + { + name = "subtle"; + packageId = "subtle"; + optional = true; + usesDefaultFeatures = false; + } + ]; + features = { + "blobby" = [ "dep:blobby" ]; + "block-buffer" = [ "dep:block-buffer" ]; + "const-oid" = [ "dep:const-oid" ]; + "core-api" = [ "block-buffer" ]; + "default" = [ "core-api" ]; + "dev" = [ "blobby" ]; + "mac" = [ "subtle" ]; + "oid" = [ "const-oid" ]; + "rand_core" = [ "crypto-common/rand_core" ]; + "std" = [ "alloc" "crypto-common/std" ]; + "subtle" = [ "dep:subtle" ]; + }; + resolvedDefaultFeatures = [ "alloc" "block-buffer" "core-api" "default" "mac" "std" "subtle" ]; + }; + "digest 0.8.1" = rec { + crateName = "digest"; + version = "0.8.1"; + edition = "2015"; + sha256 = "1madjl27f3kj5ql7kwgvb9c8b7yb7bv7yfgx7rqzj4i3fp4cil7k"; + authors = [ + "RustCrypto Developers" + ]; + dependencies = [ + { + name = "generic-array"; + packageId = "generic-array 0.12.4"; + } + ]; + features = { + "blobby" = [ "dep:blobby" ]; + "dev" = [ "blobby" ]; + }; + resolvedDefaultFeatures = [ "std" ]; + }; + "dirs" = rec { + crateName = "dirs"; + version = "4.0.0"; + edition = "2015"; + sha256 = "0n8020zl4f0frfnzvgb9agvk4a14i1kjz4daqnxkgslndwmaffna"; + authors = [ + "Simon Ochsenreither <simon@ochsenreither.de>" + ]; + dependencies = [ + { + name = "dirs-sys"; + packageId = "dirs-sys"; + } + ]; + + }; + "dirs-next" = rec { + crateName = "dirs-next"; + version = "2.0.0"; + edition = "2018"; + sha256 = "1q9kr151h9681wwp6is18750ssghz6j9j7qm7qi1ngcwy7mzi35r"; + authors = [ + "The @xdg-rs members" + ]; + dependencies = [ + { + name = "cfg-if"; + packageId = "cfg-if"; + } + { + name = "dirs-sys-next"; + packageId = "dirs-sys-next"; + } + ]; + + }; + "dirs-sys" = rec { + crateName = "dirs-sys"; + version = "0.3.7"; + edition = "2015"; + sha256 = "19md1cnkazham8a6kh22v12d8hh3raqahfk6yb043vrjr68is78v"; + authors = [ + "Simon Ochsenreither <simon@ochsenreither.de>" + ]; + dependencies = [ + { + name = "libc"; + packageId = "libc"; + target = { target, features }: (target."unix" or false); + } + { + name = "redox_users"; + packageId = "redox_users"; + usesDefaultFeatures = false; + target = { target, features }: ("redox" == target."os"); + } + { + name = "winapi"; + packageId = "winapi"; + target = { target, features }: (target."windows" or false); + features = [ "knownfolders" "objbase" "shlobj" "winbase" "winerror" ]; + } + ]; + + }; + "dirs-sys-next" = rec { + crateName = "dirs-sys-next"; + version = "0.1.2"; + edition = "2018"; + sha256 = "0kavhavdxv4phzj4l0psvh55hszwnr0rcz8sxbvx20pyqi2a3gaf"; + authors = [ + "The @xdg-rs members" + ]; + dependencies = [ + { + name = "libc"; + packageId = "libc"; + target = { target, features }: (target."unix" or false); + } + { + name = "redox_users"; + packageId = "redox_users"; + usesDefaultFeatures = false; + target = { target, features }: ("redox" == target."os"); + } + { + name = "winapi"; + packageId = "winapi"; + target = { target, features }: (target."windows" or false); + features = [ "knownfolders" "objbase" "shlobj" "winbase" "winerror" ]; + } + ]; + + }; + "either" = rec { + crateName = "either"; + version = "1.8.1"; + edition = "2018"; + sha256 = "14bdy4qsxlfnm4626z4shwaiffi8l5krzkn7ykki1jgqzsrapjkz"; + authors = [ + "bluss" + ]; + features = { + "default" = [ "use_std" ]; + 
"serde" = [ "dep:serde" ]; + }; + resolvedDefaultFeatures = [ "default" "use_std" ]; + }; + "endian-type" = rec { + crateName = "endian-type"; + version = "0.1.2"; + edition = "2015"; + sha256 = "0bbh88zaig1jfqrm7w3gx0pz81kw2jakk3055vbgapw3dmk08ky3"; + authors = [ + "Lolirofle <lolipopple@hotmail.com>" + ]; + + }; + "errno" = rec { + crateName = "errno"; + version = "0.3.1"; + edition = "2018"; + sha256 = "0fp7qy6fwagrnmi45msqnl01vksqwdb2qbbv60n9cz7rf0xfrksb"; + authors = [ + "Chris Wong <lambda.fairy@gmail.com>" + ]; + dependencies = [ + { + name = "errno-dragonfly"; + packageId = "errno-dragonfly"; + target = { target, features }: ("dragonfly" == target."os"); + } + { + name = "libc"; + packageId = "libc"; + target = { target, features }: ("hermit" == target."os"); + } + { + name = "libc"; + packageId = "libc"; + target = { target, features }: ("wasi" == target."os"); + } + { + name = "libc"; + packageId = "libc"; + target = { target, features }: (target."unix" or false); + } + { + name = "windows-sys"; + packageId = "windows-sys 0.48.0"; + target = { target, features }: (target."windows" or false); + features = [ "Win32_Foundation" "Win32_System_Diagnostics_Debug" ]; + } + ]; + features = { + "default" = [ "std" ]; + }; + }; + "errno-dragonfly" = rec { + crateName = "errno-dragonfly"; + version = "0.1.2"; + edition = "2018"; + sha256 = "1grrmcm6q8512hkq5yzch3yv8wafflc2apbmsaabiyk44yqz2s5a"; + authors = [ + "Michael Neumann <mneumann@ntecs.de>" + ]; + dependencies = [ + { + name = "libc"; + packageId = "libc"; + usesDefaultFeatures = false; + } + ]; + buildDependencies = [ + { + name = "cc"; + packageId = "cc"; + } + ]; + + }; + "error-code" = rec { + crateName = "error-code"; + version = "2.3.1"; + edition = "2018"; + sha256 = "08baxlf8qz01lgjsdbfhs193r9y1nlc566s5xvzyf4dzwy8qkwb4"; + authors = [ + "Douman <douman@gmx.se>" + ]; + dependencies = [ + { + name = "libc"; + packageId = "libc"; + usesDefaultFeatures = false; + } + { + name = "str-buf"; + packageId = "str-buf"; + } + ]; + features = { }; + }; + "fake-simd" = rec { + crateName = "fake-simd"; + version = "0.1.2"; + edition = "2015"; + sha256 = "1vfylvk4va2ivqx85603lyqqp0zk52cgbs4n5nfbbbqx577qm2p8"; + authors = [ + "The Rust-Crypto Project Developers" + ]; + + }; + "fastrand" = rec { + crateName = "fastrand"; + version = "1.9.0"; + edition = "2018"; + sha256 = "1gh12m56265ihdbzh46bhh0jf74i197wm51jg1cw75q7ggi96475"; + authors = [ + "Stjepan Glavina <stjepang@gmail.com>" + ]; + dependencies = [ + { + name = "instant"; + packageId = "instant"; + target = { target, features }: (("wasm32" == target."arch") && (!("wasi" == target."os"))); + } + ]; + devDependencies = [ + { + name = "instant"; + packageId = "instant"; + target = { target, features }: (("wasm32" == target."arch") && (!("wasi" == target."os"))); + features = [ "wasm-bindgen" ]; + } + ]; + + }; + "fd-lock" = rec { + crateName = "fd-lock"; + version = "3.0.12"; + edition = "2018"; + sha256 = "0hlnn1302p37qlc9xl2k5y0vw8q8id5kg59an6riy89hjlynpbir"; + authors = [ + "Yoshua Wuyts <yoshuawuyts@gmail.com>" + ]; + dependencies = [ + { + name = "cfg-if"; + packageId = "cfg-if"; + } + { + name = "rustix"; + packageId = "rustix"; + target = { target, features }: (target."unix" or false); + features = [ "fs" ]; + } + { + name = "windows-sys"; + packageId = "windows-sys 0.48.0"; + target = { target, features }: (target."windows" or false); + features = [ "Win32_Foundation" "Win32_Storage_FileSystem" "Win32_System_IO" ]; + } + ]; + + }; + "fixedbitset" = rec { + crateName = 
"fixedbitset"; + version = "0.4.2"; + edition = "2015"; + sha256 = "101v41amgv5n9h4hcghvrbfk5vrncx1jwm35rn5szv4rk55i7rqc"; + authors = [ + "bluss" + ]; + features = { + "default" = [ "std" ]; + "serde" = [ "dep:serde" ]; + }; + }; + "fnv" = rec { + crateName = "fnv"; + version = "1.0.7"; + edition = "2015"; + sha256 = "1hc2mcqha06aibcaza94vbi81j6pr9a1bbxrxjfhc91zin8yr7iz"; + libPath = "lib.rs"; + authors = [ + "Alex Crichton <alex@alexcrichton.com>" + ]; + features = { + "default" = [ "std" ]; + }; + resolvedDefaultFeatures = [ "default" "std" ]; + }; + "fs2" = rec { + crateName = "fs2"; + version = "0.4.3"; + edition = "2015"; + sha256 = "04v2hwk7035c088f19mfl5b1lz84gnvv2hv6m935n0hmirszqr4m"; + authors = [ + "Dan Burkert <dan@danburkert.com>" + ]; + dependencies = [ + { + name = "libc"; + packageId = "libc"; + target = { target, features }: (target."unix" or false); + } + { + name = "winapi"; + packageId = "winapi"; + target = { target, features }: (target."windows" or false); + features = [ "handleapi" "processthreadsapi" "winerror" "fileapi" "winbase" "std" ]; + } + ]; + + }; + "fuchsia-cprng" = rec { + crateName = "fuchsia-cprng"; + version = "0.1.1"; + edition = "2018"; + sha256 = "1fnkqrbz7ixxzsb04bsz9p0zzazanma8znfdqjvh39n14vapfvx0"; + authors = [ + "Erick Tryzelaar <etryzelaar@google.com>" + ]; + + }; + "futures" = rec { + crateName = "futures"; + version = "0.3.28"; + edition = "2018"; + sha256 = "0h7c1xvxk751c7xlnph6fh3rb77z4lig4qif7f8q79db2az2ld13"; + dependencies = [ + { + name = "futures-channel"; + packageId = "futures-channel"; + usesDefaultFeatures = false; + features = [ "sink" ]; + } + { + name = "futures-core"; + packageId = "futures-core"; + usesDefaultFeatures = false; + } + { + name = "futures-executor"; + packageId = "futures-executor"; + optional = true; + usesDefaultFeatures = false; + } + { + name = "futures-io"; + packageId = "futures-io"; + usesDefaultFeatures = false; + } + { + name = "futures-sink"; + packageId = "futures-sink"; + usesDefaultFeatures = false; + } + { + name = "futures-task"; + packageId = "futures-task"; + usesDefaultFeatures = false; + } + { + name = "futures-util"; + packageId = "futures-util"; + usesDefaultFeatures = false; + features = [ "sink" ]; + } + ]; + features = { + "alloc" = [ "futures-core/alloc" "futures-task/alloc" "futures-sink/alloc" "futures-channel/alloc" "futures-util/alloc" ]; + "async-await" = [ "futures-util/async-await" "futures-util/async-await-macro" ]; + "bilock" = [ "futures-util/bilock" ]; + "compat" = [ "std" "futures-util/compat" ]; + "default" = [ "std" "async-await" "executor" ]; + "executor" = [ "std" "futures-executor/std" ]; + "futures-executor" = [ "dep:futures-executor" ]; + "io-compat" = [ "compat" "futures-util/io-compat" ]; + "std" = [ "alloc" "futures-core/std" "futures-task/std" "futures-io/std" "futures-sink/std" "futures-util/std" "futures-util/io" "futures-util/channel" ]; + "thread-pool" = [ "executor" "futures-executor/thread-pool" ]; + "unstable" = [ "futures-core/unstable" "futures-task/unstable" "futures-channel/unstable" "futures-io/unstable" "futures-util/unstable" ]; + "write-all-vectored" = [ "futures-util/write-all-vectored" ]; + }; + resolvedDefaultFeatures = [ "alloc" "async-await" "default" "executor" "futures-executor" "std" ]; + }; + "futures-channel" = rec { + crateName = "futures-channel"; + version = "0.3.28"; + edition = "2018"; + sha256 = "1wmm9wm5zjigxz61qkscmxp7c30zp08dy63spjz5pch9gva1hmcm"; + dependencies = [ + { + name = "futures-core"; + packageId = "futures-core"; + 
usesDefaultFeatures = false; + } + { + name = "futures-sink"; + packageId = "futures-sink"; + optional = true; + usesDefaultFeatures = false; + } + ]; + features = { + "alloc" = [ "futures-core/alloc" ]; + "default" = [ "std" ]; + "futures-sink" = [ "dep:futures-sink" ]; + "sink" = [ "futures-sink" ]; + "std" = [ "alloc" "futures-core/std" ]; + }; + resolvedDefaultFeatures = [ "alloc" "default" "futures-sink" "sink" "std" ]; + }; + "futures-core" = rec { + crateName = "futures-core"; + version = "0.3.28"; + edition = "2018"; + sha256 = "137fdxy5amg9zkpa1kqnj7bnha6b94fmddz59w973x96gqxmijjb"; + features = { + "default" = [ "std" ]; + "portable-atomic" = [ "dep:portable-atomic" ]; + "std" = [ "alloc" ]; + }; + resolvedDefaultFeatures = [ "alloc" "default" "std" ]; + }; + "futures-executor" = rec { + crateName = "futures-executor"; + version = "0.3.28"; + edition = "2018"; + sha256 = "1q468di96knnla72xdvswic1ir2qkrf5czsdigc5n4l86a1fxv6c"; + dependencies = [ + { + name = "futures-core"; + packageId = "futures-core"; + usesDefaultFeatures = false; + } + { + name = "futures-task"; + packageId = "futures-task"; + usesDefaultFeatures = false; + } + { + name = "futures-util"; + packageId = "futures-util"; + usesDefaultFeatures = false; + } + ]; + features = { + "default" = [ "std" ]; + "num_cpus" = [ "dep:num_cpus" ]; + "std" = [ "futures-core/std" "futures-task/std" "futures-util/std" ]; + "thread-pool" = [ "std" "num_cpus" ]; + }; + resolvedDefaultFeatures = [ "std" ]; + }; + "futures-io" = rec { + crateName = "futures-io"; + version = "0.3.28"; + edition = "2018"; + sha256 = "0r4rhkdhq1my4fahlhz59barqa511bylq813w3w4gvbidq4p9zsg"; + features = { + "default" = [ "std" ]; + }; + resolvedDefaultFeatures = [ "std" ]; + }; + "futures-macro" = rec { + crateName = "futures-macro"; + version = "0.3.28"; + edition = "2018"; + sha256 = "0wpfsqxwqk5k569xl0jzz4zxy85x695mndf7y9jn66q6jid59jl9"; + procMacro = true; + dependencies = [ + { + name = "proc-macro2"; + packageId = "proc-macro2 1.0.56"; + } + { + name = "quote"; + packageId = "quote 1.0.26"; + } + { + name = "syn"; + packageId = "syn 2.0.15"; + features = [ "full" ]; + } + ]; + + }; + "futures-sink" = rec { + crateName = "futures-sink"; + version = "0.3.28"; + edition = "2018"; + sha256 = "0vkv4frf4c6gm1ag9imjz8d0xvpnn22lkylsls0rffx147zf8fzl"; + features = { + "default" = [ "std" ]; + "std" = [ "alloc" ]; + }; + resolvedDefaultFeatures = [ "alloc" "default" "std" ]; + }; + "futures-task" = rec { + crateName = "futures-task"; + version = "0.3.28"; + edition = "2018"; + sha256 = "0ravgihyarbplj32zp60asirfnaalw2wfsa0afhnl3kcpqrd3lvn"; + features = { + "default" = [ "std" ]; + "std" = [ "alloc" ]; + }; + resolvedDefaultFeatures = [ "alloc" "std" ]; + }; + "futures-util" = rec { + crateName = "futures-util"; + version = "0.3.28"; + edition = "2018"; + sha256 = "0cwmls9369w6q6hwlbm10q0plr6hmg8w28fpqvv4rmbjnx01xc16"; + dependencies = [ + { + name = "futures-channel"; + packageId = "futures-channel"; + optional = true; + usesDefaultFeatures = false; + features = [ "std" ]; + } + { + name = "futures-core"; + packageId = "futures-core"; + usesDefaultFeatures = false; + } + { + name = "futures-io"; + packageId = "futures-io"; + optional = true; + usesDefaultFeatures = false; + features = [ "std" ]; + } + { + name = "futures-macro"; + packageId = "futures-macro"; + optional = true; + usesDefaultFeatures = false; + } + { + name = "futures-sink"; + packageId = "futures-sink"; + optional = true; + usesDefaultFeatures = false; + } + { + name = "futures-task"; + 
packageId = "futures-task"; + usesDefaultFeatures = false; + } + { + name = "memchr"; + packageId = "memchr"; + optional = true; + } + { + name = "pin-project-lite"; + packageId = "pin-project-lite"; + } + { + name = "pin-utils"; + packageId = "pin-utils"; + } + { + name = "slab"; + packageId = "slab"; + optional = true; + } + ]; + features = { + "alloc" = [ "futures-core/alloc" "futures-task/alloc" ]; + "async-await-macro" = [ "async-await" "futures-macro" ]; + "channel" = [ "std" "futures-channel" ]; + "compat" = [ "std" "futures_01" ]; + "default" = [ "std" "async-await" "async-await-macro" ]; + "futures-channel" = [ "dep:futures-channel" ]; + "futures-io" = [ "dep:futures-io" ]; + "futures-macro" = [ "dep:futures-macro" ]; + "futures-sink" = [ "dep:futures-sink" ]; + "futures_01" = [ "dep:futures_01" ]; + "io" = [ "std" "futures-io" "memchr" ]; + "io-compat" = [ "io" "compat" "tokio-io" ]; + "memchr" = [ "dep:memchr" ]; + "portable-atomic" = [ "futures-core/portable-atomic" ]; + "sink" = [ "futures-sink" ]; + "slab" = [ "dep:slab" ]; + "std" = [ "alloc" "futures-core/std" "futures-task/std" "slab" ]; + "tokio-io" = [ "dep:tokio-io" ]; + "unstable" = [ "futures-core/unstable" "futures-task/unstable" ]; + "write-all-vectored" = [ "io" ]; + }; + resolvedDefaultFeatures = [ "alloc" "async-await" "async-await-macro" "channel" "futures-channel" "futures-io" "futures-macro" "futures-sink" "io" "memchr" "sink" "slab" "std" ]; + }; + "fxhash" = rec { + crateName = "fxhash"; + version = "0.2.1"; + edition = "2015"; + sha256 = "037mb9ichariqi45xm6mz0b11pa92gj38ba0409z3iz239sns6y3"; + libPath = "lib.rs"; + authors = [ + "cbreeden <github@u.breeden.cc>" + ]; + dependencies = [ + { + name = "byteorder"; + packageId = "byteorder"; + } + ]; + + }; + "genawaiter" = rec { + crateName = "genawaiter"; + version = "0.99.1"; + edition = "2018"; + sha256 = "1861a6vy9lc9a8lbw496m9j9jcjcn9nf7rkm6jqkkpnb3cvd0sy8"; + authors = [ + "John Simon <john@whatisaph.one>" + ]; + dependencies = [ + { + name = "genawaiter-macro"; + packageId = "genawaiter-macro"; + } + ]; + features = { + "default" = [ "proc_macro" ]; + "futures-core" = [ "dep:futures-core" ]; + "futures03" = [ "futures-core" ]; + "genawaiter-proc-macro" = [ "dep:genawaiter-proc-macro" ]; + "proc-macro-hack" = [ "dep:proc-macro-hack" ]; + "proc_macro" = [ "genawaiter-proc-macro" "proc-macro-hack" "genawaiter-macro/proc_macro" ]; + }; + }; + "genawaiter-macro" = rec { + crateName = "genawaiter-macro"; + version = "0.99.1"; + edition = "2018"; + sha256 = "1g6zmr88fk48f1ksz9ik1i2mwjsiam9s4p9aybhvs2zwzphxychb"; + authors = [ + "Devin R <devin.ragotzy@gmail.com>" + ]; + features = { }; + }; + "generic-array 0.12.4" = rec { + crateName = "generic-array"; + version = "0.12.4"; + edition = "2015"; + sha256 = "1gfpay78vijl9vrwl1k9v7fbvbhkhcmnrk4kfg9l6x24y4s9zpzz"; + libName = "generic_array"; + authors = [ + "Bartłomiej Kamiński <fizyk20@gmail.com>" + "Aaron Trent <novacrazy@gmail.com>" + ]; + dependencies = [ + { + name = "typenum"; + packageId = "typenum"; + } + ]; + features = { + "serde" = [ "dep:serde" ]; + }; + }; + "generic-array 0.14.7" = rec { + crateName = "generic-array"; + version = "0.14.7"; + edition = "2015"; + sha256 = "16lyyrzrljfq424c3n8kfwkqihlimmsg5nhshbbp48np3yjrqr45"; + libName = "generic_array"; + authors = [ + "Bartłomiej Kamiński <fizyk20@gmail.com>" + "Aaron Trent <novacrazy@gmail.com>" + ]; + dependencies = [ + { + name = "typenum"; + packageId = "typenum"; + } + ]; + buildDependencies = [ + { + name = "version_check"; + packageId = 
"version_check"; + } + ]; + features = { + "serde" = [ "dep:serde" ]; + "zeroize" = [ "dep:zeroize" ]; + }; + resolvedDefaultFeatures = [ "more_lengths" ]; + }; + "getrandom" = rec { + crateName = "getrandom"; + version = "0.2.9"; + edition = "2018"; + sha256 = "1r6p47dd9f9cgiwlxmksammbfwnhsv5hjkhd0kjsgnzanad1spn8"; + authors = [ + "The Rand Project Developers" + ]; + dependencies = [ + { + name = "cfg-if"; + packageId = "cfg-if"; + } + { + name = "libc"; + packageId = "libc"; + usesDefaultFeatures = false; + target = { target, features }: (target."unix" or false); + } + { + name = "wasi"; + packageId = "wasi"; + usesDefaultFeatures = false; + target = { target, features }: ("wasi" == target."os"); + } + ]; + features = { + "compiler_builtins" = [ "dep:compiler_builtins" ]; + "core" = [ "dep:core" ]; + "js" = [ "wasm-bindgen" "js-sys" ]; + "js-sys" = [ "dep:js-sys" ]; + "rustc-dep-of-std" = [ "compiler_builtins" "core" "libc/rustc-dep-of-std" "wasi/rustc-dep-of-std" ]; + "wasm-bindgen" = [ "dep:wasm-bindgen" ]; + }; + resolvedDefaultFeatures = [ "std" ]; + }; + "glob" = rec { + crateName = "glob"; + version = "0.3.1"; + edition = "2015"; + sha256 = "16zca52nglanv23q5qrwd5jinw3d3as5ylya6y1pbx47vkxvrynj"; + authors = [ + "The Rust Project Developers" + ]; + + }; + "h2" = rec { + crateName = "h2"; + version = "0.3.18"; + edition = "2018"; + sha256 = "08ffidcaswjn30c63whc17s93nr1afh8l4xmd21nhywqq8aaky0p"; + authors = [ + "Carl Lerche <me@carllerche.com>" + "Sean McArthur <sean@seanmonstar.com>" + ]; + dependencies = [ + { + name = "bytes"; + packageId = "bytes"; + } + { + name = "fnv"; + packageId = "fnv"; + } + { + name = "futures-core"; + packageId = "futures-core"; + usesDefaultFeatures = false; + } + { + name = "futures-sink"; + packageId = "futures-sink"; + usesDefaultFeatures = false; + } + { + name = "futures-util"; + packageId = "futures-util"; + usesDefaultFeatures = false; + } + { + name = "http"; + packageId = "http"; + } + { + name = "indexmap"; + packageId = "indexmap"; + features = [ "std" ]; + } + { + name = "slab"; + packageId = "slab"; + } + { + name = "tokio"; + packageId = "tokio"; + features = [ "io-util" ]; + } + { + name = "tokio-util"; + packageId = "tokio-util"; + features = [ "codec" ]; + } + { + name = "tracing"; + packageId = "tracing"; + usesDefaultFeatures = false; + features = [ "std" ]; + } + ]; + devDependencies = [ + { + name = "tokio"; + packageId = "tokio"; + features = [ "rt-multi-thread" "macros" "sync" "net" ]; + } + ]; + features = { }; + }; + "half" = rec { + crateName = "half"; + version = "1.8.2"; + edition = "2018"; + sha256 = "1mqbmx2m9qd4lslkb42fzgldsklhv9c4bxsc8j82r80d8m24mfza"; + authors = [ + "Kathryn Long <squeeself@gmail.com>" + ]; + features = { + "bytemuck" = [ "dep:bytemuck" ]; + "num-traits" = [ "dep:num-traits" ]; + "serde" = [ "dep:serde" ]; + "serialize" = [ "serde" ]; + "std" = [ "alloc" ]; + "zerocopy" = [ "dep:zerocopy" ]; + }; + }; + "hashbrown" = rec { + crateName = "hashbrown"; + version = "0.12.3"; + edition = "2021"; + sha256 = "1268ka4750pyg2pbgsr43f0289l5zah4arir2k4igx5a8c6fg7la"; + authors = [ + "Amanieu d'Antras <amanieu@gmail.com>" + ]; + features = { + "ahash" = [ "dep:ahash" ]; + "ahash-compile-time-rng" = [ "ahash/compile-time-rng" ]; + "alloc" = [ "dep:alloc" ]; + "bumpalo" = [ "dep:bumpalo" ]; + "compiler_builtins" = [ "dep:compiler_builtins" ]; + "core" = [ "dep:core" ]; + "default" = [ "ahash" "inline-more" ]; + "rayon" = [ "dep:rayon" ]; + "rustc-dep-of-std" = [ "nightly" "core" "compiler_builtins" "alloc" 
"rustc-internal-api" ]; + "serde" = [ "dep:serde" ]; + }; + resolvedDefaultFeatures = [ "inline-more" "raw" ]; + }; + "heck" = rec { + crateName = "heck"; + version = "0.4.1"; + edition = "2018"; + sha256 = "1a7mqsnycv5z4z5vnv1k34548jzmc0ajic7c1j8jsaspnhw5ql4m"; + authors = [ + "Without Boats <woboats@gmail.com>" + ]; + features = { + "unicode" = [ "unicode-segmentation" ]; + "unicode-segmentation" = [ "dep:unicode-segmentation" ]; + }; + resolvedDefaultFeatures = [ "default" ]; + }; + "hermit-abi 0.1.19" = rec { + crateName = "hermit-abi"; + version = "0.1.19"; + edition = "2018"; + sha256 = "0cxcm8093nf5fyn114w8vxbrbcyvv91d4015rdnlgfll7cs6gd32"; + authors = [ + "Stefan Lankes" + ]; + dependencies = [ + { + name = "libc"; + packageId = "libc"; + usesDefaultFeatures = false; + } + ]; + features = { + "compiler_builtins" = [ "dep:compiler_builtins" ]; + "core" = [ "dep:core" ]; + "rustc-dep-of-std" = [ "core" "compiler_builtins/rustc-dep-of-std" "libc/rustc-dep-of-std" ]; + }; + resolvedDefaultFeatures = [ "default" ]; + }; + "hermit-abi 0.2.6" = rec { + crateName = "hermit-abi"; + version = "0.2.6"; + edition = "2021"; + sha256 = "1iz439yz9qzk3rh9pqx2rz5c4107v3qbd7bppfsbzb1mzr02clgf"; + authors = [ + "Stefan Lankes" + ]; + dependencies = [ + { + name = "libc"; + packageId = "libc"; + usesDefaultFeatures = false; + } + ]; + features = { + "alloc" = [ "dep:alloc" ]; + "compiler_builtins" = [ "dep:compiler_builtins" ]; + "core" = [ "dep:core" ]; + "rustc-dep-of-std" = [ "core" "alloc" "compiler_builtins/rustc-dep-of-std" "libc/rustc-dep-of-std" ]; + }; + resolvedDefaultFeatures = [ "default" ]; + }; + "hermit-abi 0.3.1" = rec { + crateName = "hermit-abi"; + version = "0.3.1"; + edition = "2021"; + sha256 = "11j2v3q58kmi5mhjvh6hfrb7il2yzg7gmdf5lpwnwwv6qj04im7y"; + authors = [ + "Stefan Lankes" + ]; + features = { + "alloc" = [ "dep:alloc" ]; + "compiler_builtins" = [ "dep:compiler_builtins" ]; + "core" = [ "dep:core" ]; + "rustc-dep-of-std" = [ "core" "alloc" "compiler_builtins/rustc-dep-of-std" ]; + }; + resolvedDefaultFeatures = [ "default" ]; + }; + "hex" = rec { + crateName = "hex"; + version = "0.3.2"; + edition = "2015"; + sha256 = "0xsdcjiik5j750j67zk42qdnmm4ahirk3gmkmcqgq7qls2jjcl40"; + authors = [ + "KokaKiwi <kokakiwi@kokakiwi.net>" + ]; + features = { }; + }; + "http" = rec { + crateName = "http"; + version = "0.2.9"; + edition = "2018"; + sha256 = "10j4jjpngaymxjvi92hllr2y6acr09pq61cvzxd44qzvkb4zyvmx"; + authors = [ + "Alex Crichton <alex@alexcrichton.com>" + "Carl Lerche <me@carllerche.com>" + "Sean McArthur <sean@seanmonstar.com>" + ]; + dependencies = [ + { + name = "bytes"; + packageId = "bytes"; + } + { + name = "fnv"; + packageId = "fnv"; + } + { + name = "itoa"; + packageId = "itoa"; + } + ]; + + }; + "http-body" = rec { + crateName = "http-body"; + version = "0.4.5"; + edition = "2018"; + sha256 = "1l967qwwlvhp198xdrnc0p5d7jwfcp6q2lm510j6zqw4s4b8zwym"; + authors = [ + "Carl Lerche <me@carllerche.com>" + "Lucio Franco <luciofranco14@gmail.com>" + "Sean McArthur <sean@seanmonstar.com>" + ]; + dependencies = [ + { + name = "bytes"; + packageId = "bytes"; + } + { + name = "http"; + packageId = "http"; + } + { + name = "pin-project-lite"; + packageId = "pin-project-lite"; + } + ]; + + }; + "httparse" = rec { + crateName = "httparse"; + version = "1.8.0"; + edition = "2018"; + sha256 = "010rrfahm1jss3p022fqf3j3jmm72vhn4iqhykahb9ynpaag75yq"; + authors = [ + "Sean McArthur <sean@seanmonstar.com>" + ]; + features = { + "default" = [ "std" ]; + }; + resolvedDefaultFeatures = [ 
"default" "std" ]; + }; + "httpdate" = rec { + crateName = "httpdate"; + version = "1.0.2"; + edition = "2018"; + sha256 = "08bln7b1ibdw26gl8h4dr6rlybvlkyhlha309xbh9ghxh9nf78f4"; + authors = [ + "Pyfisch <pyfisch@posteo.org>" + ]; + + }; + "hyper" = rec { + crateName = "hyper"; + version = "0.14.26"; + edition = "2018"; + sha256 = "1m27s4p6kv5rbzqkw2lzfs60fwz7wym97zri0j8kn6pilrr2sc5b"; + authors = [ + "Sean McArthur <sean@seanmonstar.com>" + ]; + dependencies = [ + { + name = "bytes"; + packageId = "bytes"; + } + { + name = "futures-channel"; + packageId = "futures-channel"; + } + { + name = "futures-core"; + packageId = "futures-core"; + usesDefaultFeatures = false; + } + { + name = "futures-util"; + packageId = "futures-util"; + usesDefaultFeatures = false; + } + { + name = "h2"; + packageId = "h2"; + optional = true; + } + { + name = "http"; + packageId = "http"; + } + { + name = "http-body"; + packageId = "http-body"; + } + { + name = "httparse"; + packageId = "httparse"; + } + { + name = "httpdate"; + packageId = "httpdate"; + } + { + name = "itoa"; + packageId = "itoa"; + } + { + name = "pin-project-lite"; + packageId = "pin-project-lite"; + } + { + name = "socket2"; + packageId = "socket2"; + optional = true; + features = [ "all" ]; + } + { + name = "tokio"; + packageId = "tokio"; + features = [ "sync" ]; + } + { + name = "tower-service"; + packageId = "tower-service"; + } + { + name = "tracing"; + packageId = "tracing"; + usesDefaultFeatures = false; + features = [ "std" ]; + } + { + name = "want"; + packageId = "want"; + } + ]; + devDependencies = [ + { + name = "futures-util"; + packageId = "futures-util"; + usesDefaultFeatures = false; + features = [ "alloc" ]; + } + { + name = "tokio"; + packageId = "tokio"; + features = [ "fs" "macros" "io-std" "io-util" "rt" "rt-multi-thread" "sync" "time" "test-util" ]; + } + ]; + features = { + "ffi" = [ "libc" ]; + "full" = [ "client" "http1" "http2" "server" "stream" "runtime" ]; + "h2" = [ "dep:h2" ]; + "http2" = [ "h2" ]; + "libc" = [ "dep:libc" ]; + "runtime" = [ "tcp" "tokio/rt" "tokio/time" ]; + "socket2" = [ "dep:socket2" ]; + "tcp" = [ "socket2" "tokio/net" "tokio/rt" "tokio/time" ]; + }; + resolvedDefaultFeatures = [ "client" "default" "full" "h2" "http1" "http2" "runtime" "server" "socket2" "stream" "tcp" ]; + }; + "hyper-timeout" = rec { + crateName = "hyper-timeout"; + version = "0.4.1"; + edition = "2018"; + sha256 = "1c8k3g8k2yh1gxvsx9p7amkimgxhl9kafwpj7jyf8ywc5r45ifdv"; + authors = [ + "Herman J. 
Radtke III <herman@hermanradtke.com>" + ]; + dependencies = [ + { + name = "hyper"; + packageId = "hyper"; + features = [ "client" ]; + } + { + name = "pin-project-lite"; + packageId = "pin-project-lite"; + } + { + name = "tokio"; + packageId = "tokio"; + } + { + name = "tokio-io-timeout"; + packageId = "tokio-io-timeout"; + } + ]; + devDependencies = [ + { + name = "hyper"; + packageId = "hyper"; + features = [ "client" "http1" "tcp" ]; + } + { + name = "tokio"; + packageId = "tokio"; + features = [ "io-std" "io-util" "macros" ]; + } + ]; + + }; + "imbl" = rec { + crateName = "imbl"; + version = "2.0.0"; + edition = "2018"; + sha256 = "03fvbk1g1pqs6j77g76vq5klqi6bx9jl9di782268ilzrmlnp062"; + authors = [ + "Bodil Stokke <bodil@bodil.org>" + "Joe Neeman <joeneeman@gmail.com>" + ]; + dependencies = [ + { + name = "bitmaps"; + packageId = "bitmaps"; + } + { + name = "imbl-sized-chunks"; + packageId = "imbl-sized-chunks"; + } + { + name = "proptest"; + packageId = "proptest"; + optional = true; + } + { + name = "rand_core"; + packageId = "rand_core 0.6.4"; + } + { + name = "rand_xoshiro"; + packageId = "rand_xoshiro"; + } + { + name = "serde"; + packageId = "serde"; + optional = true; + } + ]; + buildDependencies = [ + { + name = "version_check"; + packageId = "version_check"; + } + ]; + devDependencies = [ + { + name = "proptest"; + packageId = "proptest"; + } + { + name = "serde"; + packageId = "serde"; + } + ]; + features = { + "arbitrary" = [ "dep:arbitrary" ]; + "proptest" = [ "dep:proptest" ]; + "quickcheck" = [ "dep:quickcheck" ]; + "rayon" = [ "dep:rayon" ]; + "refpool" = [ "dep:refpool" ]; + "serde" = [ "dep:serde" ]; + }; + resolvedDefaultFeatures = [ "proptest" "serde" ]; + }; + "imbl-sized-chunks" = rec { + crateName = "imbl-sized-chunks"; + version = "0.1.1"; + edition = "2021"; + sha256 = "0xhhmb7aldl92hxkmsx10n59zxsa0hw4bvykc6jmq72lnah7x5g6"; + authors = [ + "Bodil Stokke <bodil@bodil.org>" + "Joe Neeman <joeneeman@gmail.com>" + ]; + dependencies = [ + { + name = "bitmaps"; + packageId = "bitmaps"; + usesDefaultFeatures = false; + } + ]; + features = { + "arbitrary" = [ "dep:arbitrary" ]; + "array-ops" = [ "dep:array-ops" ]; + "default" = [ "std" ]; + "refpool" = [ "dep:refpool" ]; + "ringbuffer" = [ "array-ops" ]; + }; + resolvedDefaultFeatures = [ "default" "std" ]; + }; + "indexmap" = rec { + crateName = "indexmap"; + version = "1.9.3"; + edition = "2021"; + sha256 = "16dxmy7yvk51wvnih3a3im6fp5lmx0wx76i03n06wyak6cwhw1xx"; + dependencies = [ + { + name = "hashbrown"; + packageId = "hashbrown"; + usesDefaultFeatures = false; + features = [ "raw" ]; + } + ]; + buildDependencies = [ + { + name = "autocfg"; + packageId = "autocfg"; + } + ]; + features = { + "arbitrary" = [ "dep:arbitrary" ]; + "quickcheck" = [ "dep:quickcheck" ]; + "rayon" = [ "dep:rayon" ]; + "rustc-rayon" = [ "dep:rustc-rayon" ]; + "serde" = [ "dep:serde" ]; + "serde-1" = [ "serde" ]; + }; + resolvedDefaultFeatures = [ "std" ]; + }; + "instant" = rec { + crateName = "instant"; + version = "0.1.12"; + edition = "2018"; + sha256 = "0b2bx5qdlwayriidhrag8vhy10kdfimfhmb3jnjmsz2h9j1bwnvs"; + authors = [ + "sebcrozet <developer@crozet.re>" + ]; + dependencies = [ + { + name = "cfg-if"; + packageId = "cfg-if"; + } + ]; + features = { + "js-sys" = [ "dep:js-sys" ]; + "stdweb" = [ "dep:stdweb" ]; + "wasm-bindgen" = [ "js-sys" "wasm-bindgen_rs" "web-sys" ]; + "wasm-bindgen_rs" = [ "dep:wasm-bindgen_rs" ]; + "web-sys" = [ "dep:web-sys" ]; + }; + }; + "io-lifetimes" = rec { + crateName = "io-lifetimes"; + version = "1.0.10"; 
+ edition = "2018"; + sha256 = "08625nsz0lgbd7c9lly6b6l45viqpsnj9jbsixd9mrz7596wfrlw"; + authors = [ + "Dan Gohman <dev@sunfishcode.online>" + ]; + dependencies = [ + { + name = "hermit-abi"; + packageId = "hermit-abi 0.3.1"; + optional = true; + target = { target, features }: ("hermit" == target."os"); + } + { + name = "libc"; + packageId = "libc"; + optional = true; + target = { target, features }: (!(target."windows" or false)); + } + { + name = "windows-sys"; + packageId = "windows-sys 0.48.0"; + optional = true; + target = { target, features }: (target."windows" or false); + features = [ "Win32_Foundation" "Win32_Storage_FileSystem" "Win32_Networking_WinSock" "Win32_Security" "Win32_System_IO" "Win32_System_Threading" ]; + } + ]; + features = { + "async-std" = [ "dep:async-std" ]; + "close" = [ "libc" "hermit-abi" "windows-sys" ]; + "default" = [ "close" ]; + "fs-err" = [ "dep:fs-err" ]; + "hermit-abi" = [ "dep:hermit-abi" ]; + "libc" = [ "dep:libc" ]; + "mio" = [ "dep:mio" ]; + "os_pipe" = [ "dep:os_pipe" ]; + "socket2" = [ "dep:socket2" ]; + "tokio" = [ "dep:tokio" ]; + "windows-sys" = [ "dep:windows-sys" ]; + }; + resolvedDefaultFeatures = [ "close" "default" "hermit-abi" "libc" "windows-sys" ]; + }; + "is-terminal" = rec { + crateName = "is-terminal"; + version = "0.4.7"; + edition = "2018"; + sha256 = "07xyfla3f2jjb666s72la5jvl9zq7mixbqkjvyfi5j018rhr7kxd"; + authors = [ + "softprops <d.tangren@gmail.com>" + "Dan Gohman <dev@sunfishcode.online>" + ]; + dependencies = [ + { + name = "hermit-abi"; + packageId = "hermit-abi 0.3.1"; + target = { target, features }: ("hermit" == target."os"); + } + { + name = "io-lifetimes"; + packageId = "io-lifetimes"; + } + { + name = "rustix"; + packageId = "rustix"; + target = { target, features }: (!((target."windows" or false) || ("hermit" == target."os") || ("unknown" == target."os"))); + features = [ "termios" ]; + } + { + name = "windows-sys"; + packageId = "windows-sys 0.48.0"; + target = { target, features }: (target."windows" or false); + features = [ "Win32_Foundation" "Win32_Storage_FileSystem" "Win32_System_Console" ]; + } + ]; + + }; + "itertools" = rec { + crateName = "itertools"; + version = "0.10.5"; + edition = "2018"; + sha256 = "0ww45h7nxx5kj6z2y6chlskxd1igvs4j507anr6dzg99x1h25zdh"; + authors = [ + "bluss" + ]; + dependencies = [ + { + name = "either"; + packageId = "either"; + usesDefaultFeatures = false; + } + ]; + features = { + "default" = [ "use_std" ]; + "use_std" = [ "use_alloc" "either/use_std" ]; + }; + resolvedDefaultFeatures = [ "default" "use_alloc" "use_std" ]; + }; + "itoa" = rec { + crateName = "itoa"; + version = "1.0.6"; + edition = "2018"; + sha256 = "19jc2sa3wvdc29zhgbwf3bayikq4rq18n20dbyg9ahd4hbsxjfj5"; + authors = [ + "David Tolnay <dtolnay@gmail.com>" + ]; + features = { + "no-panic" = [ "dep:no-panic" ]; + }; + }; + "jobserver" = rec { + crateName = "jobserver"; + version = "0.1.26"; + edition = "2018"; + sha256 = "1hkprvh1zp5s3qwjjwwhw7rcpivczcbf6q60rcxr0m8158hzsv4k"; + authors = [ + "Alex Crichton <alex@alexcrichton.com>" + ]; + dependencies = [ + { + name = "libc"; + packageId = "libc"; + target = { target, features }: (target."unix" or false); + } + ]; + + }; + "js-sys" = rec { + crateName = "js-sys"; + version = "0.3.61"; + edition = "2018"; + sha256 = "0c075apyc5fxp2sbgr87qcvq53pcjxmp05l47lzlhpn5a0hxwpa4"; + authors = [ + "The wasm-bindgen Developers" + ]; + dependencies = [ + { + name = "wasm-bindgen"; + packageId = "wasm-bindgen"; + } + ]; + + }; + "lazy_static" = rec { + crateName = "lazy_static"; 
+ version = "1.4.0"; + edition = "2015"; + sha256 = "0in6ikhw8mgl33wjv6q6xfrb5b9jr16q8ygjy803fay4zcisvaz2"; + authors = [ + "Marvin Löbel <loebel.marvin@gmail.com>" + ]; + features = { + "spin" = [ "dep:spin" ]; + "spin_no_std" = [ "spin" ]; + }; + }; + "lexical-core" = rec { + crateName = "lexical-core"; + version = "0.8.5"; + edition = "2018"; + sha256 = "0ihf0x3vrk25fq3bv9q35m0xax0wmvwkh0j0pjm2yk4ddvh5vpic"; + authors = [ + "Alex Huszagh <ahuszagh@gmail.com>" + ]; + dependencies = [ + { + name = "lexical-parse-float"; + packageId = "lexical-parse-float"; + optional = true; + usesDefaultFeatures = false; + } + { + name = "lexical-parse-integer"; + packageId = "lexical-parse-integer"; + optional = true; + usesDefaultFeatures = false; + } + { + name = "lexical-util"; + packageId = "lexical-util"; + usesDefaultFeatures = false; + } + { + name = "lexical-write-float"; + packageId = "lexical-write-float"; + optional = true; + usesDefaultFeatures = false; + } + { + name = "lexical-write-integer"; + packageId = "lexical-write-integer"; + optional = true; + usesDefaultFeatures = false; + } + ]; + features = { + "compact" = [ "lexical-write-integer/compact" "lexical-write-float/compact" "lexical-parse-integer/compact" "lexical-parse-float/compact" ]; + "default" = [ "std" "write-integers" "write-floats" "parse-integers" "parse-floats" ]; + "f128" = [ "lexical-util/f128" "lexical-parse-float/f128" "lexical-write-float/f128" ]; + "f16" = [ "lexical-util/f16" "lexical-parse-float/f16" "lexical-write-float/f16" ]; + "format" = [ "lexical-util/format" "lexical-parse-integer/format" "lexical-parse-float/format" "lexical-write-integer/format" "lexical-write-float/format" ]; + "lexical-parse-float" = [ "dep:lexical-parse-float" ]; + "lexical-parse-integer" = [ "dep:lexical-parse-integer" ]; + "lexical-write-float" = [ "dep:lexical-write-float" ]; + "lexical-write-integer" = [ "dep:lexical-write-integer" ]; + "lint" = [ "lexical-util/lint" "lexical-write-integer/lint" "lexical-write-float/lint" "lexical-parse-integer/lint" "lexical-parse-float/lint" ]; + "nightly" = [ "lexical-write-integer/nightly" "lexical-write-float/nightly" "lexical-parse-integer/nightly" "lexical-parse-float/nightly" ]; + "parse-floats" = [ "lexical-parse-float" "parse" "floats" ]; + "parse-integers" = [ "lexical-parse-integer" "parse" "integers" ]; + "power-of-two" = [ "lexical-util/power-of-two" "lexical-write-integer/power-of-two" "lexical-write-float/power-of-two" "lexical-parse-integer/power-of-two" "lexical-parse-float/power-of-two" ]; + "radix" = [ "lexical-util/radix" "lexical-write-integer/radix" "lexical-write-float/radix" "lexical-parse-integer/radix" "lexical-parse-float/radix" ]; + "safe" = [ "lexical-write-integer/safe" "lexical-write-float/safe" "lexical-parse-integer/safe" "lexical-parse-float/safe" ]; + "std" = [ "lexical-util/std" "lexical-write-integer/std" "lexical-write-float/std" "lexical-parse-integer/std" "lexical-parse-float/std" ]; + "write-floats" = [ "lexical-write-float" "write" "floats" ]; + "write-integers" = [ "lexical-write-integer" "write" "integers" ]; + }; + resolvedDefaultFeatures = [ "default" "floats" "format" "integers" "lexical-parse-float" "lexical-parse-integer" "lexical-write-float" "lexical-write-integer" "parse" "parse-floats" "parse-integers" "std" "write" "write-floats" "write-integers" ]; + }; + "lexical-parse-float" = rec { + crateName = "lexical-parse-float"; + version = "0.8.5"; + edition = "2018"; + sha256 = "0py0gp8hlzcrlvjqmqlpl2v1as65iiqxq2xsabxvhc01pmg3lfv8"; + authors = [ + 
"Alex Huszagh <ahuszagh@gmail.com>" + ]; + dependencies = [ + { + name = "lexical-parse-integer"; + packageId = "lexical-parse-integer"; + usesDefaultFeatures = false; + } + { + name = "lexical-util"; + packageId = "lexical-util"; + usesDefaultFeatures = false; + features = [ "parse-floats" ]; + } + { + name = "static_assertions"; + packageId = "static_assertions"; + } + ]; + features = { + "compact" = [ "lexical-util/compact" "lexical-parse-integer/compact" ]; + "default" = [ "std" ]; + "f128" = [ "lexical-util/f128" ]; + "f16" = [ "lexical-util/f16" ]; + "format" = [ "lexical-util/format" "lexical-parse-integer/format" ]; + "lint" = [ "lexical-util/lint" "lexical-parse-integer/lint" ]; + "nightly" = [ "lexical-parse-integer/nightly" ]; + "power-of-two" = [ "lexical-util/power-of-two" "lexical-parse-integer/power-of-two" ]; + "radix" = [ "lexical-util/radix" "lexical-parse-integer/radix" "power-of-two" ]; + "safe" = [ "lexical-parse-integer/safe" ]; + "std" = [ "lexical-util/std" "lexical-parse-integer/std" ]; + }; + resolvedDefaultFeatures = [ "format" "std" ]; + }; + "lexical-parse-integer" = rec { + crateName = "lexical-parse-integer"; + version = "0.8.6"; + edition = "2018"; + sha256 = "1sayji3mpvb2xsjq56qcq3whfz8px9a6fxk5v7v15hyhbr4982bd"; + authors = [ + "Alex Huszagh <ahuszagh@gmail.com>" + ]; + dependencies = [ + { + name = "lexical-util"; + packageId = "lexical-util"; + usesDefaultFeatures = false; + features = [ "parse-integers" ]; + } + { + name = "static_assertions"; + packageId = "static_assertions"; + } + ]; + features = { + "compact" = [ "lexical-util/compact" ]; + "default" = [ "std" ]; + "format" = [ "lexical-util/format" ]; + "lint" = [ "lexical-util/lint" ]; + "power-of-two" = [ "lexical-util/power-of-two" ]; + "radix" = [ "lexical-util/radix" "power-of-two" ]; + "std" = [ "lexical-util/std" ]; + }; + resolvedDefaultFeatures = [ "format" "std" ]; + }; + "lexical-util" = rec { + crateName = "lexical-util"; + version = "0.8.5"; + edition = "2018"; + sha256 = "1z73qkv7yxhsbc4aiginn1dqmsj8jarkrdlyxc88g2gz2vzvjmaj"; + authors = [ + "Alex Huszagh <ahuszagh@gmail.com>" + ]; + dependencies = [ + { + name = "static_assertions"; + packageId = "static_assertions"; + } + ]; + features = { + "default" = [ "std" ]; + "f128" = [ "floats" ]; + "f16" = [ "floats" ]; + "parse-floats" = [ "parse" "floats" ]; + "parse-integers" = [ "parse" "integers" ]; + "radix" = [ "power-of-two" ]; + "write-floats" = [ "write" "floats" ]; + "write-integers" = [ "write" "integers" ]; + }; + resolvedDefaultFeatures = [ "floats" "format" "integers" "parse" "parse-floats" "parse-integers" "std" "write" "write-floats" "write-integers" ]; + }; + "lexical-write-float" = rec { + crateName = "lexical-write-float"; + version = "0.8.5"; + edition = "2018"; + sha256 = "0qk825l0csvnksh9sywb51996cjc2bylq6rxjaiha7sqqjhvmjmc"; + authors = [ + "Alex Huszagh <ahuszagh@gmail.com>" + ]; + dependencies = [ + { + name = "lexical-util"; + packageId = "lexical-util"; + usesDefaultFeatures = false; + features = [ "write-floats" ]; + } + { + name = "lexical-write-integer"; + packageId = "lexical-write-integer"; + usesDefaultFeatures = false; + } + { + name = "static_assertions"; + packageId = "static_assertions"; + } + ]; + features = { + "compact" = [ "lexical-util/compact" "lexical-write-integer/compact" ]; + "default" = [ "std" ]; + "f128" = [ "lexical-util/f128" ]; + "f16" = [ "lexical-util/f16" ]; + "format" = [ "lexical-util/format" ]; + "lint" = [ "lexical-util/lint" "lexical-write-integer/lint" ]; + "nightly" = [ 
"lexical-write-integer/nightly" ]; + "power-of-two" = [ "lexical-util/power-of-two" "lexical-write-integer/power-of-two" ]; + "radix" = [ "lexical-util/radix" "lexical-write-integer/radix" "power-of-two" ]; + "safe" = [ "lexical-write-integer/safe" ]; + "std" = [ "lexical-util/std" "lexical-write-integer/std" ]; + }; + resolvedDefaultFeatures = [ "format" "std" ]; + }; + "lexical-write-integer" = rec { + crateName = "lexical-write-integer"; + version = "0.8.5"; + edition = "2018"; + sha256 = "0ii4hmvqrg6pd4j9y1pkhkp0nw2wpivjzmljh6v6ca22yk8z7dp1"; + authors = [ + "Alex Huszagh <ahuszagh@gmail.com>" + ]; + dependencies = [ + { + name = "lexical-util"; + packageId = "lexical-util"; + usesDefaultFeatures = false; + features = [ "write-integers" ]; + } + { + name = "static_assertions"; + packageId = "static_assertions"; + } + ]; + features = { + "compact" = [ "lexical-util/compact" ]; + "default" = [ "std" ]; + "format" = [ "lexical-util/format" ]; + "lint" = [ "lexical-util/lint" ]; + "power-of-two" = [ "lexical-util/power-of-two" ]; + "radix" = [ "lexical-util/radix" "power-of-two" ]; + "std" = [ "lexical-util/std" ]; + }; + resolvedDefaultFeatures = [ "format" "std" ]; + }; + "libc" = rec { + crateName = "libc"; + version = "0.2.143"; + edition = "2015"; + sha256 = "0940zabsl3gm2jr03mg2ign4rnakdd4rp5hfhjzddic57s4hghpd"; + authors = [ + "The Rust Project Developers" + ]; + features = { + "default" = [ "std" ]; + "rustc-dep-of-std" = [ "align" "rustc-std-workspace-core" ]; + "rustc-std-workspace-core" = [ "dep:rustc-std-workspace-core" ]; + "use_std" = [ "std" ]; + }; + resolvedDefaultFeatures = [ "default" "extra_traits" "std" ]; + }; + "libm" = rec { + crateName = "libm"; + version = "0.2.6"; + edition = "2018"; + sha256 = "1ywg7jfcgfv4jypxi3f6rpf7n9509ky695bfzy1fqhms7ymhi09l"; + authors = [ + "Jorge Aparicio <jorge@japaric.io>" + ]; + features = { + "musl-reference-tests" = [ "rand" ]; + "rand" = [ "dep:rand" ]; + }; + resolvedDefaultFeatures = [ "default" ]; + }; + "linux-raw-sys" = rec { + crateName = "linux-raw-sys"; + version = "0.3.7"; + edition = "2018"; + sha256 = "17s7qr5h82blrxy29014zzhr30jcxcjc8r16v2p31rzcfal7xsgc"; + authors = [ + "Dan Gohman <dev@sunfishcode.online>" + ]; + features = { + "compiler_builtins" = [ "dep:compiler_builtins" ]; + "core" = [ "dep:core" ]; + "default" = [ "std" "general" "errno" ]; + "rustc-dep-of-std" = [ "core" "compiler_builtins" "no_std" ]; + }; + resolvedDefaultFeatures = [ "errno" "general" "ioctl" "no_std" ]; + }; + "lock_api" = rec { + crateName = "lock_api"; + version = "0.4.9"; + edition = "2018"; + sha256 = "1py41vk243hwk345nhkn5nw0bd4m03gzjmprdjqq6rg5dwv12l23"; + authors = [ + "Amanieu d'Antras <amanieu@gmail.com>" + ]; + dependencies = [ + { + name = "scopeguard"; + packageId = "scopeguard"; + usesDefaultFeatures = false; + } + ]; + buildDependencies = [ + { + name = "autocfg"; + packageId = "autocfg"; + } + ]; + features = { + "owning_ref" = [ "dep:owning_ref" ]; + "serde" = [ "dep:serde" ]; + }; + }; + "log" = rec { + crateName = "log"; + version = "0.4.17"; + edition = "2015"; + sha256 = "0biqlaaw1lsr8bpnmbcc0fvgjj34yy79ghqzyi0ali7vgil2xcdb"; + authors = [ + "The Rust Project Developers" + ]; + dependencies = [ + { + name = "cfg-if"; + packageId = "cfg-if"; + } + ]; + features = { + "kv_unstable" = [ "value-bag" ]; + "kv_unstable_serde" = [ "kv_unstable_std" "value-bag/serde" "serde" ]; + "kv_unstable_std" = [ "std" "kv_unstable" "value-bag/error" ]; + "kv_unstable_sval" = [ "kv_unstable" "value-bag/sval" "sval" ]; + "serde" = [ 
"dep:serde" ]; + "sval" = [ "dep:sval" ]; + "value-bag" = [ "dep:value-bag" ]; + }; + resolvedDefaultFeatures = [ "std" ]; + }; + "matchit" = rec { + crateName = "matchit"; + version = "0.7.0"; + edition = "2021"; + sha256 = "0h7a1a57wamz0305dipj20shv2b5dw47jjp6dsgfaxmpmznlhwmq"; + authors = [ + "Ibraheem Ahmed <ibraheem@ibraheem.ca>" + ]; + features = { }; + resolvedDefaultFeatures = [ "default" ]; + }; + "memchr" = rec { + crateName = "memchr"; + version = "2.5.0"; + edition = "2018"; + sha256 = "0vanfk5mzs1g1syqnj03q8n0syggnhn55dq535h2wxr7rwpfbzrd"; + authors = [ + "Andrew Gallant <jamslam@gmail.com>" + "bluss" + ]; + features = { + "compiler_builtins" = [ "dep:compiler_builtins" ]; + "core" = [ "dep:core" ]; + "default" = [ "std" ]; + "libc" = [ "dep:libc" ]; + "rustc-dep-of-std" = [ "core" "compiler_builtins" ]; + "use_std" = [ "std" ]; + }; + resolvedDefaultFeatures = [ "default" "std" ]; + }; + "memoffset" = rec { + crateName = "memoffset"; + version = "0.8.0"; + edition = "2015"; + sha256 = "1qcdic88dhgw76pafgndpz04pig8il4advq978mxdxdwrydp276n"; + authors = [ + "Gilad Naaman <gilad.naaman@gmail.com>" + ]; + buildDependencies = [ + { + name = "autocfg"; + packageId = "autocfg"; + } + ]; + features = { }; + resolvedDefaultFeatures = [ "default" ]; + }; + "mime" = rec { + crateName = "mime"; + version = "0.3.17"; + edition = "2015"; + sha256 = "16hkibgvb9klh0w0jk5crr5xv90l3wlf77ggymzjmvl1818vnxv8"; + authors = [ + "Sean McArthur <sean@seanmonstar.com>" + ]; + + }; + "mio" = rec { + crateName = "mio"; + version = "0.8.6"; + edition = "2018"; + sha256 = "1ygx5chq81k3vk2bx722xwcwf2qydmm337jsnijgzd7mxx39m7av"; + authors = [ + "Carl Lerche <me@carllerche.com>" + "Thomas de Zeeuw <thomasdezeeuw@gmail.com>" + "Tokio Contributors <team@tokio.rs>" + ]; + dependencies = [ + { + name = "libc"; + packageId = "libc"; + target = { target, features }: ("wasi" == target."os"); + } + { + name = "libc"; + packageId = "libc"; + target = { target, features }: (target."unix" or false); + } + { + name = "log"; + packageId = "log"; + } + { + name = "wasi"; + packageId = "wasi"; + target = { target, features }: ("wasi" == target."os"); + } + { + name = "windows-sys"; + packageId = "windows-sys 0.45.0"; + target = { target, features }: (target."windows" or false); + features = [ "Win32_Foundation" "Win32_Networking_WinSock" "Win32_Storage_FileSystem" "Win32_System_IO" "Win32_System_WindowsProgramming" ]; + } + ]; + features = { + "os-ext" = [ "os-poll" "windows-sys/Win32_System_Pipes" "windows-sys/Win32_Security" ]; + }; + resolvedDefaultFeatures = [ "default" "net" "os-ext" "os-poll" ]; + }; + "multimap" = rec { + crateName = "multimap"; + version = "0.8.3"; + edition = "2015"; + sha256 = "0sicyz4n500vdhgcxn4g8jz97cp1ijir1rnbgph3pmx9ckz4dkp5"; + authors = [ + "Håvar Nøvik <havar.novik@gmail.com>" + ]; + features = { + "default" = [ "serde_impl" ]; + "serde" = [ "dep:serde" ]; + "serde_impl" = [ "serde" ]; + }; + }; + "nibble_vec" = rec { + crateName = "nibble_vec"; + version = "0.1.0"; + edition = "2018"; + sha256 = "0hsdp3s724s30hkqz74ky6sqnadhp2xwcj1n1hzy4vzkz4yxi9bp"; + authors = [ + "Michael Sproul <micsproul@gmail.com>" + ]; + dependencies = [ + { + name = "smallvec"; + packageId = "smallvec"; + } + ]; + + }; + "nix" = rec { + crateName = "nix"; + version = "0.25.1"; + edition = "2018"; + sha256 = "1r4vyp5g1lxzpig31bkrhxdf2bggb4nvk405x5gngzfvwxqgyipk"; + authors = [ + "The nix-rust Project Developers" + ]; + dependencies = [ + { + name = "bitflags"; + packageId = "bitflags"; + } + { + name = "cfg-if"; + 
packageId = "cfg-if"; + } + { + name = "libc"; + packageId = "libc"; + features = [ "extra_traits" ]; + } + ]; + buildDependencies = [ + { + name = "autocfg"; + packageId = "autocfg"; + } + ]; + features = { + "aio" = [ "pin-utils" ]; + "default" = [ "acct" "aio" "dir" "env" "event" "feature" "fs" "hostname" "inotify" "ioctl" "kmod" "mman" "mount" "mqueue" "net" "personality" "poll" "process" "pthread" "ptrace" "quota" "reboot" "resource" "sched" "signal" "socket" "term" "time" "ucontext" "uio" "user" "zerocopy" ]; + "dir" = [ "fs" ]; + "memoffset" = [ "dep:memoffset" ]; + "mount" = [ "uio" ]; + "mqueue" = [ "fs" ]; + "net" = [ "socket" ]; + "pin-utils" = [ "dep:pin-utils" ]; + "ptrace" = [ "process" ]; + "sched" = [ "process" ]; + "signal" = [ "process" ]; + "socket" = [ "memoffset" ]; + "ucontext" = [ "signal" ]; + "user" = [ "feature" ]; + "zerocopy" = [ "fs" "uio" ]; + }; + resolvedDefaultFeatures = [ "fs" "ioctl" "poll" "process" "signal" "term" ]; + }; + "nix-cli" = rec { + crateName = "nix-cli"; + version = "0.1.0"; + edition = "2021"; + crateBin = [ + { + name = "nix-cli"; + path = "src/main.rs"; + requiredFeatures = [ ]; + } + { + name = "nix-store"; + path = "src/bin/nix-store.rs"; + requiredFeatures = [ ]; + } + ]; + # We can't filter paths with references in Nix 2.4 + # See https://github.com/NixOS/nix/issues/5410 + src = + if (lib.versionOlder builtins.nixVersion "2.4pre20211007") + then lib.cleanSourceWith { filter = sourceFilter; src = ./nix_cli; } + else ./nix_cli; + dependencies = [ + { + name = "clap"; + packageId = "clap 4.2.7"; + } + { + name = "tempfile"; + packageId = "tempfile"; + } + ]; + features = { }; + resolvedDefaultFeatures = [ "integration_tests" ]; + }; + "nix-compat" = rec { + crateName = "nix-compat"; + version = "0.1.0"; + edition = "2021"; + # We can't filter paths with references in Nix 2.4 + # See https://github.com/NixOS/nix/issues/5410 + src = + if (lib.versionOlder builtins.nixVersion "2.4pre20211007") + then lib.cleanSourceWith { filter = sourceFilter; src = ./nix-compat; } + else ./nix-compat; + dependencies = [ + { + name = "anyhow"; + packageId = "anyhow"; + } + { + name = "data-encoding"; + packageId = "data-encoding"; + } + { + name = "glob"; + packageId = "glob"; + } + { + name = "serde"; + packageId = "serde"; + features = [ "derive" ]; + } + { + name = "serde_json"; + packageId = "serde_json"; + } + { + name = "sha2"; + packageId = "sha2 0.10.6"; + } + { + name = "thiserror"; + packageId = "thiserror"; + } + ]; + devDependencies = [ + { + name = "serde_json"; + packageId = "serde_json"; + } + { + name = "test-case"; + packageId = "test-case"; + } + { + name = "test-generator"; + packageId = "test-generator"; + } + ]; + + }; + "nom8" = rec { + crateName = "nom8"; + version = "0.2.0"; + edition = "2018"; + sha256 = "1y6jzabxyrl05vxnh63r66ac2fh0symg5fnynxm4ii3zkif580df"; + dependencies = [ + { + name = "memchr"; + packageId = "memchr"; + usesDefaultFeatures = false; + } + ]; + features = { + "default" = [ "std" ]; + "std" = [ "alloc" "memchr/std" ]; + "unstable-doc" = [ "alloc" "std" ]; + }; + resolvedDefaultFeatures = [ "alloc" "default" "std" ]; + }; + "nu-ansi-term" = rec { + crateName = "nu-ansi-term"; + version = "0.46.0"; + edition = "2018"; + sha256 = "115sywxh53p190lyw97alm14nc004qj5jm5lvdj608z84rbida3p"; + authors = [ + "ogham@bsago.me" + "Ryan Scheel (Havvy) <ryan.havvy@gmail.com>" + "Josh Triplett <josh@joshtriplett.org>" + "The Nushell Project Developers" + ]; + dependencies = [ + { + name = "overload"; + packageId = "overload"; + 
} + { + name = "winapi"; + packageId = "winapi"; + target = { target, features }: ("windows" == target."os"); + features = [ "consoleapi" "errhandlingapi" "fileapi" "handleapi" "processenv" ]; + } + ]; + features = { + "derive_serde_style" = [ "serde" ]; + "serde" = [ "dep:serde" ]; + }; + }; + "num-traits" = rec { + crateName = "num-traits"; + version = "0.2.15"; + edition = "2015"; + sha256 = "1kfdqqw2ndz0wx2j75v9nbjx7d3mh3150zs4p5595y02rwsdx3jp"; + authors = [ + "The Rust Project Developers" + ]; + dependencies = [ + { + name = "libm"; + packageId = "libm"; + optional = true; + } + ]; + buildDependencies = [ + { + name = "autocfg"; + packageId = "autocfg"; + } + ]; + features = { + "default" = [ "std" ]; + "libm" = [ "dep:libm" ]; + }; + resolvedDefaultFeatures = [ "default" "libm" "std" ]; + }; + "num_cpus" = rec { + crateName = "num_cpus"; + version = "1.15.0"; + edition = "2015"; + sha256 = "0fsrjy3arnbcl41vz0gppya8d7d24cpkjgfflr3v8pivl4nrxb0g"; + authors = [ + "Sean McArthur <sean@seanmonstar.com>" + ]; + dependencies = [ + { + name = "hermit-abi"; + packageId = "hermit-abi 0.2.6"; + target = { target, features }: ((("x86_64" == target."arch") || ("aarch64" == target."arch")) && ("hermit" == target."os")); + } + { + name = "libc"; + packageId = "libc"; + target = { target, features }: (!(target."windows" or false)); + } + ]; + + }; + "once_cell" = rec { + crateName = "once_cell"; + version = "1.17.1"; + edition = "2021"; + sha256 = "1lrsy9c5ikf2iwxr4iwgd3rlq9mg8alh0np1g8abnvp1k4151rdp"; + authors = [ + "Aleksey Kladov <aleksey.kladov@gmail.com>" + ]; + features = { + "alloc" = [ "race" ]; + "atomic-polyfill" = [ "critical-section" ]; + "atomic_polyfill" = [ "dep:atomic_polyfill" ]; + "critical-section" = [ "critical_section" "atomic_polyfill" ]; + "critical_section" = [ "dep:critical_section" ]; + "default" = [ "std" ]; + "parking_lot" = [ "parking_lot_core" ]; + "parking_lot_core" = [ "dep:parking_lot_core" ]; + "std" = [ "alloc" ]; + }; + resolvedDefaultFeatures = [ "alloc" "default" "race" "std" ]; + }; + "oorandom" = rec { + crateName = "oorandom"; + version = "11.1.3"; + edition = "2018"; + sha256 = "0xdm4vd89aiwnrk1xjwzklnchjqvib4klcihlc2bsd4x50mbrc8a"; + authors = [ + "Simon Heath <icefox@dreamquest.io>" + ]; + + }; + "opaque-debug" = rec { + crateName = "opaque-debug"; + version = "0.2.3"; + edition = "2015"; + sha256 = "172j6bs8ndclqxa2m64qc0y1772rr73g4l9fg2svscgicnbfff98"; + authors = [ + "RustCrypto Developers" + ]; + + }; + "os_str_bytes" = rec { + crateName = "os_str_bytes"; + version = "6.5.0"; + edition = "2021"; + sha256 = "0rz2711gl575ng6vm9a97q42wqnf4wk1165wn221jb8gn17z9vff"; + authors = [ + "dylni" + ]; + features = { + "default" = [ "memchr" "raw_os_str" ]; + "memchr" = [ "dep:memchr" ]; + "print_bytes" = [ "dep:print_bytes" ]; + "uniquote" = [ "dep:uniquote" ]; + }; + resolvedDefaultFeatures = [ "raw_os_str" ]; + }; + "output_vt100" = rec { + crateName = "output_vt100"; + version = "0.1.3"; + edition = "2018"; + sha256 = "0rpvpiq7gkyvvwyp9sk0zxhbk99ldlrv5q3ycr03wkmbxgx270k2"; + authors = [ + "Phuntsok Drak-pa <phundrak@phundrak.fr>" + ]; + dependencies = [ + { + name = "winapi"; + packageId = "winapi"; + features = [ "winuser" "winbase" "consoleapi" "processenv" ]; + } + ]; + + }; + "overload" = rec { + crateName = "overload"; + version = "0.1.1"; + edition = "2018"; + sha256 = "0fdgbaqwknillagy1xq7xfgv60qdbk010diwl7s1p0qx7hb16n5i"; + authors = [ + "Daniel Salvadori <danaugrs@gmail.com>" + ]; + + }; + "parking_lot" = rec { + crateName = "parking_lot"; + version 
= "0.11.2"; + edition = "2018"; + sha256 = "16gzf41bxmm10x82bla8d6wfppy9ym3fxsmdjyvn61m66s0bf5vx"; + authors = [ + "Amanieu d'Antras <amanieu@gmail.com>" + ]; + dependencies = [ + { + name = "instant"; + packageId = "instant"; + } + { + name = "lock_api"; + packageId = "lock_api"; + } + { + name = "parking_lot_core"; + packageId = "parking_lot_core"; + } + ]; + features = { + "arc_lock" = [ "lock_api/arc_lock" ]; + "deadlock_detection" = [ "parking_lot_core/deadlock_detection" ]; + "nightly" = [ "parking_lot_core/nightly" "lock_api/nightly" ]; + "owning_ref" = [ "lock_api/owning_ref" ]; + "serde" = [ "lock_api/serde" ]; + "stdweb" = [ "instant/stdweb" ]; + "wasm-bindgen" = [ "instant/wasm-bindgen" ]; + }; + resolvedDefaultFeatures = [ "default" ]; + }; + "parking_lot_core" = rec { + crateName = "parking_lot_core"; + version = "0.8.6"; + edition = "2018"; + sha256 = "1p2nfcbr0b9lm9rglgm28k6mwyjwgm4knipsmqbgqaxdy3kcz8k0"; + authors = [ + "Amanieu d'Antras <amanieu@gmail.com>" + ]; + dependencies = [ + { + name = "cfg-if"; + packageId = "cfg-if"; + } + { + name = "instant"; + packageId = "instant"; + } + { + name = "libc"; + packageId = "libc"; + target = { target, features }: (target."unix" or false); + } + { + name = "redox_syscall"; + packageId = "redox_syscall 0.2.16"; + target = { target, features }: ("redox" == target."os"); + } + { + name = "smallvec"; + packageId = "smallvec"; + } + { + name = "winapi"; + packageId = "winapi"; + target = { target, features }: (target."windows" or false); + features = [ "winnt" "ntstatus" "minwindef" "winerror" "winbase" "errhandlingapi" "handleapi" ]; + } + ]; + features = { + "backtrace" = [ "dep:backtrace" ]; + "deadlock_detection" = [ "petgraph" "thread-id" "backtrace" ]; + "petgraph" = [ "dep:petgraph" ]; + "thread-id" = [ "dep:thread-id" ]; + }; + }; + "path-clean" = rec { + crateName = "path-clean"; + version = "0.1.0"; + edition = "2015"; + sha256 = "1pcgqxw0mgg3ha5hi5xkjhyjf488bw5rw1g3qlr9awbq4szh3fpc"; + authors = [ + "Dan Reeves <hey@danreev.es>" + ]; + + }; + "percent-encoding" = rec { + crateName = "percent-encoding"; + version = "2.2.0"; + edition = "2018"; + sha256 = "13nrpp6r1f4k14viksga3094krcrxgv4b42kqbriy63k7ln5g327"; + authors = [ + "The rust-url developers" + ]; + features = { + "default" = [ "alloc" ]; + }; + resolvedDefaultFeatures = [ "alloc" "default" ]; + }; + "petgraph" = rec { + crateName = "petgraph"; + version = "0.6.3"; + edition = "2018"; + sha256 = "1912xw827flj8mzqm62jcbg0cv54qfhzm48c13ilzr9px67d5msd"; + authors = [ + "bluss" + "mitchmindtree" + ]; + dependencies = [ + { + name = "fixedbitset"; + packageId = "fixedbitset"; + usesDefaultFeatures = false; + } + { + name = "indexmap"; + packageId = "indexmap"; + features = [ "std" ]; + } + ]; + features = { + "all" = [ "unstable" "quickcheck" "matrix_graph" "stable_graph" "graphmap" ]; + "default" = [ "graphmap" "stable_graph" "matrix_graph" ]; + "quickcheck" = [ "dep:quickcheck" ]; + "serde" = [ "dep:serde" ]; + "serde-1" = [ "serde" "serde_derive" ]; + "serde_derive" = [ "dep:serde_derive" ]; + "unstable" = [ "generate" ]; + }; + }; + "pin-project" = rec { + crateName = "pin-project"; + version = "1.0.12"; + edition = "2018"; + sha256 = "1k3f9jkia3idxl2pqxamszwnl89dk52fa4jqj3p7zmmwnq4scadd"; + dependencies = [ + { + name = "pin-project-internal"; + packageId = "pin-project-internal"; + } + ]; + + }; + "pin-project-internal" = rec { + crateName = "pin-project-internal"; + version = "1.0.12"; + edition = "2018"; + sha256 = 
"0maa6icn7rdfy4xvgfaq7m7bwpw9f19wg76f1ncsiixd0lgdp6q6"; + procMacro = true; + dependencies = [ + { + name = "proc-macro2"; + packageId = "proc-macro2 1.0.56"; + } + { + name = "quote"; + packageId = "quote 1.0.26"; + } + { + name = "syn"; + packageId = "syn 1.0.109"; + features = [ "full" "visit-mut" ]; + } + ]; + + }; + "pin-project-lite" = rec { + crateName = "pin-project-lite"; + version = "0.2.9"; + edition = "2018"; + sha256 = "05n1z851l356hpgqadw4ar64mjanaxq1qlwqsf2k05ziq8xax9z0"; + + }; + "pin-utils" = rec { + crateName = "pin-utils"; + version = "0.1.0"; + edition = "2018"; + sha256 = "117ir7vslsl2z1a7qzhws4pd01cg2d3338c47swjyvqv2n60v1wb"; + authors = [ + "Josef Brandl <mail@josefbrandl.de>" + ]; + + }; + "plotters" = rec { + crateName = "plotters"; + version = "0.3.4"; + edition = "2018"; + sha256 = "15xzbxnjcfsaf8lac846lgi4xmn9k18m9k8gqm35aaa2wqwvcf15"; + authors = [ + "Hao Hou <haohou302@gmail.com>" + ]; + dependencies = [ + { + name = "num-traits"; + packageId = "num-traits"; + } + { + name = "plotters-backend"; + packageId = "plotters-backend"; + } + { + name = "plotters-svg"; + packageId = "plotters-svg"; + optional = true; + } + { + name = "wasm-bindgen"; + packageId = "wasm-bindgen"; + target = { target, features }: (("wasm32" == target."arch") && (!("wasi" == target."os"))); + } + { + name = "web-sys"; + packageId = "web-sys"; + target = { target, features }: (("wasm32" == target."arch") && (!("wasi" == target."os"))); + features = [ "Document" "DomRect" "Element" "HtmlElement" "Node" "Window" "HtmlCanvasElement" "CanvasRenderingContext2d" ]; + } + ]; + features = { + "all_elements" = [ "errorbar" "candlestick" "boxplot" "histogram" ]; + "all_series" = [ "area_series" "line_series" "point_series" "surface_series" ]; + "bitmap_backend" = [ "plotters-bitmap" "ttf" ]; + "bitmap_encoder" = [ "plotters-bitmap/image_encoder" ]; + "bitmap_gif" = [ "plotters-bitmap/gif_backend" ]; + "chrono" = [ "dep:chrono" ]; + "datetime" = [ "chrono" ]; + "default" = [ "bitmap_backend" "bitmap_encoder" "bitmap_gif" "svg_backend" "chrono" "ttf" "image" "deprecated_items" "all_series" "all_elements" "full_palette" ]; + "evcxr" = [ "svg_backend" ]; + "evcxr_bitmap" = [ "evcxr" "bitmap_backend" "plotters-svg/bitmap_encoder" ]; + "font-kit" = [ "dep:font-kit" ]; + "fontconfig-dlopen" = [ "font-kit/source-fontconfig-dlopen" ]; + "image" = [ "dep:image" ]; + "lazy_static" = [ "dep:lazy_static" ]; + "pathfinder_geometry" = [ "dep:pathfinder_geometry" ]; + "plotters-bitmap" = [ "dep:plotters-bitmap" ]; + "plotters-svg" = [ "dep:plotters-svg" ]; + "svg_backend" = [ "plotters-svg" ]; + "ttf" = [ "font-kit" "ttf-parser" "lazy_static" "pathfinder_geometry" ]; + "ttf-parser" = [ "dep:ttf-parser" ]; + }; + resolvedDefaultFeatures = [ "area_series" "line_series" "plotters-svg" "svg_backend" ]; + }; + "plotters-backend" = rec { + crateName = "plotters-backend"; + version = "0.3.4"; + edition = "2018"; + sha256 = "0hl1x8dqrzsjw1vabyw48gzp7g6z8rlyjqjc4b0wvzl1cdhjhchr"; + authors = [ + "Hao Hou <haohou302@gmail.com>" + ]; + + }; + "plotters-svg" = rec { + crateName = "plotters-svg"; + version = "0.3.3"; + edition = "2018"; + sha256 = "0vx5wmm5mxip3fm4l67l3wcvv3jwph4c70zpd3kdmqdab4kiva7r"; + authors = [ + "Hao Hou <haohou302@gmail.com>" + ]; + dependencies = [ + { + name = "plotters-backend"; + packageId = "plotters-backend"; + } + ]; + features = { + "bitmap_encoder" = [ "image" ]; + "image" = [ "dep:image" ]; + }; + }; + "ppv-lite86" = rec { + crateName = "ppv-lite86"; + version = "0.2.17"; + edition = "2018"; + 
sha256 = "1pp6g52aw970adv3x2310n7glqnji96z0a9wiamzw89ibf0ayh2v"; + authors = [ + "The CryptoCorrosion Contributors" + ]; + features = { + "default" = [ "std" ]; + }; + resolvedDefaultFeatures = [ "simd" "std" ]; + }; + "pretty_assertions" = rec { + crateName = "pretty_assertions"; + version = "1.3.0"; + edition = "2018"; + sha256 = "0mgp1ajl3fdc55h989ph48znnk86m41j9dqnpg80yy5a435rnpm2"; + authors = [ + "Colin Kiegel <kiegel@gmx.de>" + "Florent Fayolle <florent.fayolle69@gmail.com>" + "Tom Milligan <code@tommilligan.net>" + ]; + dependencies = [ + { + name = "ctor"; + packageId = "ctor"; + target = { target, features }: (target."windows" or false); + } + { + name = "diff"; + packageId = "diff"; + } + { + name = "output_vt100"; + packageId = "output_vt100"; + target = { target, features }: (target."windows" or false); + } + { + name = "yansi"; + packageId = "yansi"; + } + ]; + features = { + "default" = [ "std" ]; + }; + resolvedDefaultFeatures = [ "default" "std" ]; + }; + "prettyplease" = rec { + crateName = "prettyplease"; + version = "0.1.25"; + edition = "2021"; + sha256 = "11lskniv8pf8y8bn4dc3nmjapfhnibxbm5gamp2ad9qna3lld1kc"; + authors = [ + "David Tolnay <dtolnay@gmail.com>" + ]; + dependencies = [ + { + name = "proc-macro2"; + packageId = "proc-macro2 1.0.56"; + usesDefaultFeatures = false; + } + { + name = "syn"; + packageId = "syn 1.0.109"; + usesDefaultFeatures = false; + features = [ "full" ]; + } + ]; + devDependencies = [ + { + name = "syn"; + packageId = "syn 1.0.109"; + usesDefaultFeatures = false; + features = [ "parsing" ]; + } + ]; + features = { + "verbatim" = [ "syn/parsing" ]; + }; + }; + "proc-macro-error" = rec { + crateName = "proc-macro-error"; + version = "1.0.4"; + edition = "2018"; + sha256 = "1373bhxaf0pagd8zkyd03kkx6bchzf6g0dkwrwzsnal9z47lj9fs"; + authors = [ + "CreepySkeleton <creepy-skeleton@yandex.ru>" + ]; + dependencies = [ + { + name = "proc-macro-error-attr"; + packageId = "proc-macro-error-attr"; + } + { + name = "proc-macro2"; + packageId = "proc-macro2 1.0.56"; + } + { + name = "quote"; + packageId = "quote 1.0.26"; + } + { + name = "syn"; + packageId = "syn 1.0.109"; + optional = true; + usesDefaultFeatures = false; + } + ]; + buildDependencies = [ + { + name = "version_check"; + packageId = "version_check"; + } + ]; + features = { + "default" = [ "syn-error" ]; + "syn" = [ "dep:syn" ]; + "syn-error" = [ "syn" ]; + }; + resolvedDefaultFeatures = [ "default" "syn" "syn-error" ]; + }; + "proc-macro-error-attr" = rec { + crateName = "proc-macro-error-attr"; + version = "1.0.4"; + edition = "2018"; + sha256 = "0sgq6m5jfmasmwwy8x4mjygx5l7kp8s4j60bv25ckv2j1qc41gm1"; + procMacro = true; + authors = [ + "CreepySkeleton <creepy-skeleton@yandex.ru>" + ]; + dependencies = [ + { + name = "proc-macro2"; + packageId = "proc-macro2 1.0.56"; + } + { + name = "quote"; + packageId = "quote 1.0.26"; + } + ]; + buildDependencies = [ + { + name = "version_check"; + packageId = "version_check"; + } + ]; + + }; + "proc-macro2 0.4.30" = rec { + crateName = "proc-macro2"; + version = "0.4.30"; + edition = "2015"; + sha256 = "0nd71fl24sys066jrha6j7i34nfkjv44yzw8yww9742wmc8j0gfg"; + authors = [ + "Alex Crichton <alex@alexcrichton.com>" + ]; + dependencies = [ + { + name = "unicode-xid"; + packageId = "unicode-xid"; + } + ]; + features = { + "default" = [ "proc-macro" ]; + }; + resolvedDefaultFeatures = [ "default" "proc-macro" ]; + }; + "proc-macro2 1.0.56" = rec { + crateName = "proc-macro2"; + version = "1.0.56"; + edition = "2018"; + sha256 = 
"0ddlk2c7s9c0fhmf8cd0wikayicv9xrm9ck9vzgg9w86rnqbsqrb"; + authors = [ + "David Tolnay <dtolnay@gmail.com>" + "Alex Crichton <alex@alexcrichton.com>" + ]; + dependencies = [ + { + name = "unicode-ident"; + packageId = "unicode-ident"; + } + ]; + features = { + "default" = [ "proc-macro" ]; + }; + resolvedDefaultFeatures = [ "default" "proc-macro" ]; + }; + "proptest" = rec { + crateName = "proptest"; + version = "1.1.0"; + edition = "2018"; + sha256 = "0w5s3wwchd3i6m15lsbsk3r3y25d1gwn13m6a185ks8w06cbiw99"; + authors = [ + "Jason Lingle" + ]; + dependencies = [ + { + name = "bit-set"; + packageId = "bit-set"; + optional = true; + } + { + name = "bitflags"; + packageId = "bitflags"; + } + { + name = "byteorder"; + packageId = "byteorder"; + usesDefaultFeatures = false; + } + { + name = "lazy_static"; + packageId = "lazy_static"; + optional = true; + } + { + name = "num-traits"; + packageId = "num-traits"; + usesDefaultFeatures = false; + features = [ "libm" ]; + } + { + name = "quick-error"; + packageId = "quick-error 2.0.1"; + optional = true; + } + { + name = "rand"; + packageId = "rand 0.8.5"; + usesDefaultFeatures = false; + features = [ "alloc" ]; + } + { + name = "rand_chacha"; + packageId = "rand_chacha"; + usesDefaultFeatures = false; + } + { + name = "rand_xorshift"; + packageId = "rand_xorshift"; + } + { + name = "regex-syntax"; + packageId = "regex-syntax 0.6.29"; + optional = true; + } + { + name = "rusty-fork"; + packageId = "rusty-fork"; + optional = true; + usesDefaultFeatures = false; + } + { + name = "tempfile"; + packageId = "tempfile"; + optional = true; + } + { + name = "unarray"; + packageId = "unarray"; + } + ]; + features = { + "bit-set" = [ "dep:bit-set" ]; + "default" = [ "std" "fork" "timeout" "bit-set" "break-dead-code" ]; + "default-code-coverage" = [ "std" "fork" "timeout" "bit-set" ]; + "fork" = [ "std" "rusty-fork" "tempfile" ]; + "hardware-rng" = [ "x86" ]; + "lazy_static" = [ "dep:lazy_static" ]; + "quick-error" = [ "dep:quick-error" ]; + "regex-syntax" = [ "dep:regex-syntax" ]; + "rusty-fork" = [ "dep:rusty-fork" ]; + "std" = [ "rand/std" "byteorder/std" "lazy_static" "quick-error" "regex-syntax" "num-traits/std" ]; + "tempfile" = [ "dep:tempfile" ]; + "timeout" = [ "fork" "rusty-fork/timeout" ]; + "x86" = [ "dep:x86" ]; + }; + resolvedDefaultFeatures = [ "alloc" "bit-set" "break-dead-code" "default" "fork" "lazy_static" "quick-error" "regex-syntax" "rusty-fork" "std" "tempfile" "timeout" ]; + }; + "prost" = rec { + crateName = "prost"; + version = "0.11.9"; + edition = "2021"; + sha256 = "1kc1hva2h894hc0zf6r4r8fsxfpazf7xn5rj3jya9sbrsyhym0hb"; + authors = [ + "Dan Burkert <dan@danburkert.com>" + "Lucio Franco <luciofranco14@gmail.com" + "Tokio Contributors <team@tokio.rs>" + ]; + dependencies = [ + { + name = "bytes"; + packageId = "bytes"; + usesDefaultFeatures = false; + } + { + name = "prost-derive"; + packageId = "prost-derive"; + optional = true; + } + ]; + features = { + "default" = [ "prost-derive" "std" ]; + "prost-derive" = [ "dep:prost-derive" ]; + }; + resolvedDefaultFeatures = [ "default" "prost-derive" "std" ]; + }; + "prost-build" = rec { + crateName = "prost-build"; + version = "0.11.9"; + edition = "2021"; + sha256 = "0w5jx97q96ydhkg67wx3lb11kfy8195c56g0476glzws5iak758i"; + authors = [ + "Dan Burkert <dan@danburkert.com>" + "Lucio Franco <luciofranco14@gmail.com>" + "Tokio Contributors <team@tokio.rs>" + ]; + dependencies = [ + { + name = "bytes"; + packageId = "bytes"; + usesDefaultFeatures = false; + } + { + name = "heck"; + packageId = 
"heck"; + } + { + name = "itertools"; + packageId = "itertools"; + usesDefaultFeatures = false; + features = [ "use_alloc" ]; + } + { + name = "lazy_static"; + packageId = "lazy_static"; + } + { + name = "log"; + packageId = "log"; + } + { + name = "multimap"; + packageId = "multimap"; + usesDefaultFeatures = false; + } + { + name = "petgraph"; + packageId = "petgraph"; + usesDefaultFeatures = false; + } + { + name = "prettyplease"; + packageId = "prettyplease"; + optional = true; + } + { + name = "prost"; + packageId = "prost"; + usesDefaultFeatures = false; + } + { + name = "prost-types"; + packageId = "prost-types"; + usesDefaultFeatures = false; + } + { + name = "regex"; + packageId = "regex"; + usesDefaultFeatures = false; + features = [ "std" "unicode-bool" ]; + } + { + name = "syn"; + packageId = "syn 1.0.109"; + optional = true; + features = [ "full" ]; + } + { + name = "tempfile"; + packageId = "tempfile"; + } + { + name = "which"; + packageId = "which"; + } + ]; + features = { + "cleanup-markdown" = [ "pulldown-cmark" "pulldown-cmark-to-cmark" ]; + "default" = [ "format" ]; + "format" = [ "prettyplease" "syn" ]; + "prettyplease" = [ "dep:prettyplease" ]; + "pulldown-cmark" = [ "dep:pulldown-cmark" ]; + "pulldown-cmark-to-cmark" = [ "dep:pulldown-cmark-to-cmark" ]; + "syn" = [ "dep:syn" ]; + }; + resolvedDefaultFeatures = [ "default" "format" "prettyplease" "syn" ]; + }; + "prost-derive" = rec { + crateName = "prost-derive"; + version = "0.11.9"; + edition = "2021"; + sha256 = "1d3mw2s2jba1f7wcjmjd6ha2a255p2rmynxhm1nysv9w1z8xilp5"; + procMacro = true; + authors = [ + "Dan Burkert <dan@danburkert.com>" + "Lucio Franco <luciofranco14@gmail.com>" + "Tokio Contributors <team@tokio.rs>" + ]; + dependencies = [ + { + name = "anyhow"; + packageId = "anyhow"; + } + { + name = "itertools"; + packageId = "itertools"; + usesDefaultFeatures = false; + features = [ "use_alloc" ]; + } + { + name = "proc-macro2"; + packageId = "proc-macro2 1.0.56"; + } + { + name = "quote"; + packageId = "quote 1.0.26"; + } + { + name = "syn"; + packageId = "syn 1.0.109"; + features = [ "extra-traits" ]; + } + ]; + + }; + "prost-types" = rec { + crateName = "prost-types"; + version = "0.11.9"; + edition = "2021"; + sha256 = "04ryk38sqkp2nf4dgdqdfbgn6zwwvjraw6hqq6d9a6088shj4di1"; + authors = [ + "Dan Burkert <dan@danburkert.com>" + "Lucio Franco <luciofranco14@gmail.com" + "Tokio Contributors <team@tokio.rs>" + ]; + dependencies = [ + { + name = "prost"; + packageId = "prost"; + usesDefaultFeatures = false; + features = [ "prost-derive" ]; + } + ]; + features = { + "default" = [ "std" ]; + "std" = [ "prost/std" ]; + }; + resolvedDefaultFeatures = [ "default" "std" ]; + }; + "quick-error 1.2.3" = rec { + crateName = "quick-error"; + version = "1.2.3"; + edition = "2015"; + sha256 = "1q6za3v78hsspisc197bg3g7rpc989qycy8ypr8ap8igv10ikl51"; + authors = [ + "Paul Colomiets <paul@colomiets.name>" + "Colin Kiegel <kiegel@gmx.de>" + ]; + + }; + "quick-error 2.0.1" = rec { + crateName = "quick-error"; + version = "2.0.1"; + edition = "2018"; + sha256 = "18z6r2rcjvvf8cn92xjhm2qc3jpd1ljvcbf12zv0k9p565gmb4x9"; + authors = [ + "Paul Colomiets <paul@colomiets.name>" + "Colin Kiegel <kiegel@gmx.de>" + ]; + + }; + "quote 0.6.13" = rec { + crateName = "quote"; + version = "0.6.13"; + edition = "2015"; + sha256 = "1qgqq48jymp5h4y082aanf25hrw6bpb678xh3zw993qfhxmkpqkc"; + authors = [ + "David Tolnay <dtolnay@gmail.com>" + ]; + dependencies = [ + { + name = "proc-macro2"; + packageId = "proc-macro2 0.4.30"; + usesDefaultFeatures = 
false; + } + ]; + features = { + "default" = [ "proc-macro" ]; + "proc-macro" = [ "proc-macro2/proc-macro" ]; + }; + resolvedDefaultFeatures = [ "default" "proc-macro" ]; + }; + "quote 1.0.26" = rec { + crateName = "quote"; + version = "1.0.26"; + edition = "2018"; + sha256 = "1z521piwggwzs0rj4wjx4ma6af1g6f1h5dkp382y5akqyx5sy924"; + authors = [ + "David Tolnay <dtolnay@gmail.com>" + ]; + dependencies = [ + { + name = "proc-macro2"; + packageId = "proc-macro2 1.0.56"; + usesDefaultFeatures = false; + } + ]; + features = { + "default" = [ "proc-macro" ]; + "proc-macro" = [ "proc-macro2/proc-macro" ]; + }; + resolvedDefaultFeatures = [ "default" "proc-macro" ]; + }; + "radix_trie" = rec { + crateName = "radix_trie"; + version = "0.2.1"; + edition = "2018"; + sha256 = "1zaq3im5ss03w91ij11cj97vvzc5y1f3064d9pi2ysnwziww2sf0"; + authors = [ + "Michael Sproul <micsproul@gmail.com>" + ]; + dependencies = [ + { + name = "endian-type"; + packageId = "endian-type"; + } + { + name = "nibble_vec"; + packageId = "nibble_vec"; + } + ]; + features = { + "serde" = [ "dep:serde" ]; + }; + }; + "rand 0.4.6" = rec { + crateName = "rand"; + version = "0.4.6"; + edition = "2015"; + sha256 = "14qjfv3gggzhnma20k0sc1jf8y6pplsaq7n1j9ls5c8kf2wl0a2m"; + authors = [ + "The Rust Project Developers" + ]; + dependencies = [ + { + name = "fuchsia-cprng"; + packageId = "fuchsia-cprng"; + target = { target, features }: ("fuchsia" == target."os"); + } + { + name = "libc"; + packageId = "libc"; + optional = true; + target = { target, features }: (target."unix" or false); + } + { + name = "rand_core"; + packageId = "rand_core 0.3.1"; + usesDefaultFeatures = false; + target = { target, features }: ("sgx" == target."env"); + } + { + name = "rdrand"; + packageId = "rdrand"; + target = { target, features }: ("sgx" == target."env"); + } + { + name = "winapi"; + packageId = "winapi"; + target = { target, features }: (target."windows" or false); + features = [ "minwindef" "ntsecapi" "profileapi" "winnt" ]; + } + ]; + features = { + "default" = [ "std" ]; + "libc" = [ "dep:libc" ]; + "nightly" = [ "i128_support" ]; + "std" = [ "libc" ]; + }; + resolvedDefaultFeatures = [ "default" "libc" "std" ]; + }; + "rand 0.8.5" = rec { + crateName = "rand"; + version = "0.8.5"; + edition = "2018"; + sha256 = "013l6931nn7gkc23jz5mm3qdhf93jjf0fg64nz2lp4i51qd8vbrl"; + authors = [ + "The Rand Project Developers" + "The Rust Project Developers" + ]; + dependencies = [ + { + name = "libc"; + packageId = "libc"; + optional = true; + usesDefaultFeatures = false; + target = { target, features }: (target."unix" or false); + } + { + name = "rand_chacha"; + packageId = "rand_chacha"; + optional = true; + usesDefaultFeatures = false; + } + { + name = "rand_core"; + packageId = "rand_core 0.6.4"; + } + ]; + features = { + "alloc" = [ "rand_core/alloc" ]; + "default" = [ "std" "std_rng" ]; + "getrandom" = [ "rand_core/getrandom" ]; + "libc" = [ "dep:libc" ]; + "log" = [ "dep:log" ]; + "packed_simd" = [ "dep:packed_simd" ]; + "rand_chacha" = [ "dep:rand_chacha" ]; + "serde" = [ "dep:serde" ]; + "serde1" = [ "serde" "rand_core/serde1" ]; + "simd_support" = [ "packed_simd" ]; + "std" = [ "rand_core/std" "rand_chacha/std" "alloc" "getrandom" "libc" ]; + "std_rng" = [ "rand_chacha" ]; + }; + resolvedDefaultFeatures = [ "alloc" "default" "getrandom" "libc" "rand_chacha" "small_rng" "std" "std_rng" ]; + }; + "rand_chacha" = rec { + crateName = "rand_chacha"; + version = "0.3.1"; + edition = "2018"; + sha256 = "123x2adin558xbhvqb8w4f6syjsdkmqff8cxwhmjacpsl1ihmhg6"; + 
authors = [ + "The Rand Project Developers" + "The Rust Project Developers" + "The CryptoCorrosion Contributors" + ]; + dependencies = [ + { + name = "ppv-lite86"; + packageId = "ppv-lite86"; + usesDefaultFeatures = false; + features = [ "simd" ]; + } + { + name = "rand_core"; + packageId = "rand_core 0.6.4"; + } + ]; + features = { + "default" = [ "std" ]; + "serde" = [ "dep:serde" ]; + "serde1" = [ "serde" ]; + "std" = [ "ppv-lite86/std" ]; + }; + resolvedDefaultFeatures = [ "std" ]; + }; + "rand_core 0.3.1" = rec { + crateName = "rand_core"; + version = "0.3.1"; + edition = "2015"; + sha256 = "0jzdgszfa4bliigiy4hi66k7fs3gfwi2qxn8vik84ph77fwdwvvs"; + authors = [ + "The Rand Project Developers" + "The Rust Project Developers" + ]; + dependencies = [ + { + name = "rand_core"; + packageId = "rand_core 0.4.2"; + } + ]; + features = { + "alloc" = [ "rand_core/alloc" ]; + "default" = [ "std" ]; + "serde1" = [ "rand_core/serde1" ]; + "std" = [ "rand_core/std" ]; + }; + }; + "rand_core 0.4.2" = rec { + crateName = "rand_core"; + version = "0.4.2"; + edition = "2015"; + sha256 = "1p09ynysrq1vcdlmcqnapq4qakl2yd1ng3kxh3qscpx09k2a6cww"; + authors = [ + "The Rand Project Developers" + "The Rust Project Developers" + ]; + features = { + "serde" = [ "dep:serde" ]; + "serde1" = [ "serde" "serde_derive" ]; + "serde_derive" = [ "dep:serde_derive" ]; + "std" = [ "alloc" ]; + }; + }; + "rand_core 0.6.4" = rec { + crateName = "rand_core"; + version = "0.6.4"; + edition = "2018"; + sha256 = "0b4j2v4cb5krak1pv6kakv4sz6xcwbrmy2zckc32hsigbrwy82zc"; + authors = [ + "The Rand Project Developers" + "The Rust Project Developers" + ]; + dependencies = [ + { + name = "getrandom"; + packageId = "getrandom"; + optional = true; + } + ]; + features = { + "getrandom" = [ "dep:getrandom" ]; + "serde" = [ "dep:serde" ]; + "serde1" = [ "serde" ]; + "std" = [ "alloc" "getrandom" "getrandom/std" ]; + }; + resolvedDefaultFeatures = [ "alloc" "getrandom" "std" ]; + }; + "rand_xorshift" = rec { + crateName = "rand_xorshift"; + version = "0.3.0"; + edition = "2018"; + sha256 = "13vcag7gmqspzyabfl1gr9ykvxd2142q2agrj8dkyjmfqmgg4nyj"; + authors = [ + "The Rand Project Developers" + "The Rust Project Developers" + ]; + dependencies = [ + { + name = "rand_core"; + packageId = "rand_core 0.6.4"; + } + ]; + features = { + "serde" = [ "dep:serde" ]; + "serde1" = [ "serde" ]; + }; + }; + "rand_xoshiro" = rec { + crateName = "rand_xoshiro"; + version = "0.6.0"; + edition = "2018"; + sha256 = "1ajsic84rzwz5qr0mzlay8vi17swqi684bqvwqyiim3flfrcv5vg"; + authors = [ + "The Rand Project Developers" + ]; + dependencies = [ + { + name = "rand_core"; + packageId = "rand_core 0.6.4"; + } + ]; + features = { + "serde" = [ "dep:serde" ]; + "serde1" = [ "serde" ]; + }; + }; + "rayon" = rec { + crateName = "rayon"; + version = "1.7.0"; + edition = "2021"; + sha256 = "0fzh8w5ds1qjhilll4rkpd3kimw70zi5605wprxcig1pdqczab8x"; + authors = [ + "Niko Matsakis <niko@alum.mit.edu>" + "Josh Stone <cuviper@gmail.com>" + ]; + dependencies = [ + { + name = "either"; + packageId = "either"; + usesDefaultFeatures = false; + } + { + name = "rayon-core"; + packageId = "rayon-core"; + } + ]; + + }; + "rayon-core" = rec { + crateName = "rayon-core"; + version = "1.11.0"; + edition = "2021"; + sha256 = "13dymrhhdilzpbfh3aylv6ariayqdfk614b3frvwixb6d6yrb3sb"; + authors = [ + "Niko Matsakis <niko@alum.mit.edu>" + "Josh Stone <cuviper@gmail.com>" + ]; + dependencies = [ + { + name = "crossbeam-channel"; + packageId = "crossbeam-channel"; + } + { + name = "crossbeam-deque"; + 
packageId = "crossbeam-deque"; + } + { + name = "crossbeam-utils"; + packageId = "crossbeam-utils"; + } + { + name = "num_cpus"; + packageId = "num_cpus"; + } + ]; + + }; + "rdrand" = rec { + crateName = "rdrand"; + version = "0.4.0"; + edition = "2015"; + sha256 = "1cjq0kwx1bk7jx3kzyciiish5gqsj7620dm43dc52sr8fzmm9037"; + authors = [ + "Simonas Kazlauskas <rdrand@kazlauskas.me>" + ]; + dependencies = [ + { + name = "rand_core"; + packageId = "rand_core 0.3.1"; + usesDefaultFeatures = false; + } + ]; + features = { + "default" = [ "std" ]; + }; + resolvedDefaultFeatures = [ "default" "std" ]; + }; + "redox_syscall 0.2.16" = rec { + crateName = "redox_syscall"; + version = "0.2.16"; + edition = "2018"; + sha256 = "16jicm96kjyzm802cxdd1k9jmcph0db1a4lhslcnhjsvhp0mhnpv"; + libName = "syscall"; + authors = [ + "Jeremy Soller <jackpot51@gmail.com>" + ]; + dependencies = [ + { + name = "bitflags"; + packageId = "bitflags"; + } + ]; + + }; + "redox_syscall 0.3.5" = rec { + crateName = "redox_syscall"; + version = "0.3.5"; + edition = "2018"; + sha256 = "0acgiy2lc1m2vr8cr33l5s7k9wzby8dybyab1a9p753hcbr68xjn"; + libName = "syscall"; + authors = [ + "Jeremy Soller <jackpot51@gmail.com>" + ]; + dependencies = [ + { + name = "bitflags"; + packageId = "bitflags"; + } + ]; + features = { + "core" = [ "dep:core" ]; + "rustc-dep-of-std" = [ "core" "bitflags/rustc-dep-of-std" ]; + }; + }; + "redox_users" = rec { + crateName = "redox_users"; + version = "0.4.3"; + edition = "2018"; + sha256 = "0asw3s4iy69knafkhvlbchy230qawc297vddjdwjs5nglwvxhcxh"; + authors = [ + "Jose Narvaez <goyox86@gmail.com>" + "Wesley Hershberger <mggmugginsmc@gmail.com>" + ]; + dependencies = [ + { + name = "getrandom"; + packageId = "getrandom"; + features = [ "std" ]; + } + { + name = "redox_syscall"; + packageId = "redox_syscall 0.2.16"; + } + { + name = "thiserror"; + packageId = "thiserror"; + } + ]; + features = { + "auth" = [ "rust-argon2" "zeroize" ]; + "default" = [ "auth" ]; + "rust-argon2" = [ "dep:rust-argon2" ]; + "zeroize" = [ "dep:zeroize" ]; + }; + }; + "regex" = rec { + crateName = "regex"; + version = "1.8.1"; + edition = "2021"; + sha256 = "0w2kgdvs2fsv39hrsb912zjq3bx5vw1cchslvbi6mk1iycbyd0xg"; + authors = [ + "The Rust Project Developers" + ]; + dependencies = [ + { + name = "aho-corasick"; + packageId = "aho-corasick"; + optional = true; + } + { + name = "memchr"; + packageId = "memchr"; + optional = true; + } + { + name = "regex-syntax"; + packageId = "regex-syntax 0.7.1"; + usesDefaultFeatures = false; + } + ]; + features = { + "aho-corasick" = [ "dep:aho-corasick" ]; + "default" = [ "std" "perf" "unicode" "regex-syntax/default" ]; + "memchr" = [ "dep:memchr" ]; + "perf" = [ "perf-cache" "perf-dfa" "perf-inline" "perf-literal" ]; + "perf-literal" = [ "aho-corasick" "memchr" ]; + "unicode" = [ "unicode-age" "unicode-bool" "unicode-case" "unicode-gencat" "unicode-perl" "unicode-script" "unicode-segment" "regex-syntax/unicode" ]; + "unicode-age" = [ "regex-syntax/unicode-age" ]; + "unicode-bool" = [ "regex-syntax/unicode-bool" ]; + "unicode-case" = [ "regex-syntax/unicode-case" ]; + "unicode-gencat" = [ "regex-syntax/unicode-gencat" ]; + "unicode-perl" = [ "regex-syntax/unicode-perl" ]; + "unicode-script" = [ "regex-syntax/unicode-script" ]; + "unicode-segment" = [ "regex-syntax/unicode-segment" ]; + "unstable" = [ "pattern" ]; + "use_std" = [ "std" ]; + }; + resolvedDefaultFeatures = [ "aho-corasick" "default" "memchr" "perf" "perf-cache" "perf-dfa" "perf-inline" "perf-literal" "std" "unicode" "unicode-age" 
"unicode-bool" "unicode-case" "unicode-gencat" "unicode-perl" "unicode-script" "unicode-segment" ]; + }; + "regex-syntax 0.6.29" = rec { + crateName = "regex-syntax"; + version = "0.6.29"; + edition = "2018"; + sha256 = "1qgj49vm6y3zn1hi09x91jvgkl2b1fiaq402skj83280ggfwcqpi"; + authors = [ + "The Rust Project Developers" + ]; + features = { + "default" = [ "unicode" ]; + "unicode" = [ "unicode-age" "unicode-bool" "unicode-case" "unicode-gencat" "unicode-perl" "unicode-script" "unicode-segment" ]; + }; + resolvedDefaultFeatures = [ "default" "unicode" "unicode-age" "unicode-bool" "unicode-case" "unicode-gencat" "unicode-perl" "unicode-script" "unicode-segment" ]; + }; + "regex-syntax 0.7.1" = rec { + crateName = "regex-syntax"; + version = "0.7.1"; + edition = "2021"; + sha256 = "0g1s6ra0ra8xy1fxscspd406c3pn53bjm1is8phamlwvy6a656d5"; + authors = [ + "The Rust Project Developers" + ]; + features = { + "default" = [ "std" "unicode" ]; + "unicode" = [ "unicode-age" "unicode-bool" "unicode-case" "unicode-gencat" "unicode-perl" "unicode-script" "unicode-segment" ]; + }; + resolvedDefaultFeatures = [ "default" "std" "unicode" "unicode-age" "unicode-bool" "unicode-case" "unicode-gencat" "unicode-perl" "unicode-script" "unicode-segment" ]; + }; + "remove_dir_all" = rec { + crateName = "remove_dir_all"; + version = "0.5.3"; + edition = "2015"; + sha256 = "1rzqbsgkmr053bxxl04vmvsd1njyz0nxvly97aip6aa2cmb15k9s"; + authors = [ + "Aaronepower <theaaronepower@gmail.com>" + ]; + dependencies = [ + { + name = "winapi"; + packageId = "winapi"; + target = { target, features }: (target."windows" or false); + features = [ "std" "errhandlingapi" "winerror" "fileapi" "winbase" ]; + } + ]; + + }; + "rnix" = rec { + crateName = "rnix"; + version = "0.11.0"; + edition = "2021"; + sha256 = "0pybq9gp4b7lp0066236jpqi9lgb1bzvqc9axymwrq3hxgdwwddv"; + authors = [ + "jD91mZM2 <me@krake.one>" + ]; + dependencies = [ + { + name = "rowan"; + packageId = "rowan"; + } + ]; + + }; + "rowan" = rec { + crateName = "rowan"; + version = "0.15.11"; + edition = "2021"; + sha256 = "1gvqa02nmssbl10a4apvi63l95pfllnhxqvasmg4ffj8z7z9qi34"; + authors = [ + "Aleksey Kladov <aleksey.kladov@gmail.com>" + ]; + dependencies = [ + { + name = "countme"; + packageId = "countme"; + } + { + name = "hashbrown"; + packageId = "hashbrown"; + usesDefaultFeatures = false; + features = [ "inline-more" ]; + } + { + name = "memoffset"; + packageId = "memoffset"; + } + { + name = "rustc-hash"; + packageId = "rustc-hash"; + } + { + name = "text-size"; + packageId = "text-size"; + } + ]; + features = { + "serde" = [ "dep:serde" ]; + "serde1" = [ "serde" "text-size/serde" ]; + }; + }; + "rustc-hash" = rec { + crateName = "rustc-hash"; + version = "1.1.0"; + edition = "2015"; + sha256 = "1qkc5khrmv5pqi5l5ca9p5nl5hs742cagrndhbrlk3dhlrx3zm08"; + authors = [ + "The Rust Project Developers" + ]; + features = { + "default" = [ "std" ]; + }; + resolvedDefaultFeatures = [ "default" "std" ]; + }; + "rustix" = rec { + crateName = "rustix"; + version = "0.37.19"; + edition = "2018"; + sha256 = "0gb12rp992bh2h5msqcbpdsx6h1gslsb0zpp5hdnyxj2hnfp5y5c"; + authors = [ + "Dan Gohman <dev@sunfishcode.online>" + "Jakub Konka <kubkon@jakubkonka.com>" + ]; + dependencies = [ + { + name = "bitflags"; + packageId = "bitflags"; + } + { + name = "errno"; + packageId = "errno"; + rename = "libc_errno"; + optional = true; + usesDefaultFeatures = false; + target = { target, features }: ((!(target."rustix_use_libc" or false)) && (!(target."miri" or false)) && ("linux" == target."os") && 
(("x86" == target."arch") || (("x86_64" == target."arch") && ("64" == target."pointer_width")) || (("little" == target."endian") && (("arm" == target."arch") || (("aarch64" == target."arch") && ("64" == target."pointer_width")) || ("powerpc64" == target."arch") || ("riscv64" == target."arch") || ("mips" == target."arch") || ("mips64" == target."arch"))))); + } + { + name = "errno"; + packageId = "errno"; + rename = "libc_errno"; + usesDefaultFeatures = false; + target = { target, features }: ((!(target."windows" or false)) && ((target."rustix_use_libc" or false) || (target."miri" or false) || (!(("linux" == target."os") && (("x86" == target."arch") || (("x86_64" == target."arch") && ("64" == target."pointer_width")) || (("little" == target."endian") && (("arm" == target."arch") || (("aarch64" == target."arch") && ("64" == target."pointer_width")) || ("powerpc64" == target."arch") || ("riscv64" == target."arch") || ("mips" == target."arch") || ("mips64" == target."arch")))))))); + } + { + name = "errno"; + packageId = "errno"; + rename = "libc_errno"; + usesDefaultFeatures = false; + target = { target, features }: (target."windows" or false); + } + { + name = "io-lifetimes"; + packageId = "io-lifetimes"; + optional = true; + usesDefaultFeatures = false; + features = [ "close" ]; + } + { + name = "libc"; + packageId = "libc"; + optional = true; + target = { target, features }: ((!(target."rustix_use_libc" or false)) && (!(target."miri" or false)) && ("linux" == target."os") && (("x86" == target."arch") || (("x86_64" == target."arch") && ("64" == target."pointer_width")) || (("little" == target."endian") && (("arm" == target."arch") || (("aarch64" == target."arch") && ("64" == target."pointer_width")) || ("powerpc64" == target."arch") || ("riscv64" == target."arch") || ("mips" == target."arch") || ("mips64" == target."arch"))))); + features = [ "extra_traits" ]; + } + { + name = "libc"; + packageId = "libc"; + target = { target, features }: ((!(target."windows" or false)) && ((target."rustix_use_libc" or false) || (target."miri" or false) || (!(("linux" == target."os") && (("x86" == target."arch") || (("x86_64" == target."arch") && ("64" == target."pointer_width")) || (("little" == target."endian") && (("arm" == target."arch") || (("aarch64" == target."arch") && ("64" == target."pointer_width")) || ("powerpc64" == target."arch") || ("riscv64" == target."arch") || ("mips" == target."arch") || ("mips64" == target."arch")))))))); + features = [ "extra_traits" ]; + } + { + name = "linux-raw-sys"; + packageId = "linux-raw-sys"; + usesDefaultFeatures = false; + target = { target, features }: ((("android" == target."os") || ("linux" == target."os")) && ((target."rustix_use_libc" or false) || (target."miri" or false) || (!(("linux" == target."os") && (("x86" == target."arch") || (("x86_64" == target."arch") && ("64" == target."pointer_width")) || (("little" == target."endian") && (("arm" == target."arch") || (("aarch64" == target."arch") && ("64" == target."pointer_width")) || ("powerpc64" == target."arch") || ("riscv64" == target."arch") || ("mips" == target."arch") || ("mips64" == target."arch")))))))); + features = [ "general" "ioctl" "no_std" ]; + } + { + name = "linux-raw-sys"; + packageId = "linux-raw-sys"; + usesDefaultFeatures = false; + target = { target, features }: ((!(target."rustix_use_libc" or false)) && (!(target."miri" or false)) && ("linux" == target."os") && (("x86" == target."arch") || (("x86_64" == target."arch") && ("64" == target."pointer_width")) || (("little" == 
target."endian") && (("arm" == target."arch") || (("aarch64" == target."arch") && ("64" == target."pointer_width")) || ("powerpc64" == target."arch") || ("riscv64" == target."arch") || ("mips" == target."arch") || ("mips64" == target."arch"))))); + features = [ "general" "errno" "ioctl" "no_std" ]; + } + { + name = "windows-sys"; + packageId = "windows-sys 0.48.0"; + target = { target, features }: (target."windows" or false); + features = [ "Win32_Foundation" "Win32_Networking_WinSock" "Win32_NetworkManagement_IpHelper" "Win32_System_Threading" ]; + } + ]; + devDependencies = [ + { + name = "errno"; + packageId = "errno"; + rename = "libc_errno"; + usesDefaultFeatures = false; + } + { + name = "io-lifetimes"; + packageId = "io-lifetimes"; + usesDefaultFeatures = false; + features = [ "close" ]; + } + { + name = "libc"; + packageId = "libc"; + } + ]; + features = { + "all-apis" = [ "fs" "io_uring" "mm" "net" "param" "process" "procfs" "rand" "runtime" "termios" "thread" "time" ]; + "all-impls" = [ "os_pipe" "fs-err" ]; + "alloc" = [ "dep:alloc" ]; + "cc" = [ "dep:cc" ]; + "compiler_builtins" = [ "dep:compiler_builtins" ]; + "core" = [ "dep:core" ]; + "default" = [ "std" "use-libc-auxv" ]; + "fs-err" = [ "io-lifetimes/fs-err" ]; + "io-lifetimes" = [ "dep:io-lifetimes" ]; + "io_uring" = [ "fs" "net" ]; + "itoa" = [ "dep:itoa" ]; + "libc" = [ "dep:libc" ]; + "libc_errno" = [ "dep:libc_errno" ]; + "linux_latest" = [ "linux_4_11" ]; + "once_cell" = [ "dep:once_cell" ]; + "os_pipe" = [ "io-lifetimes/os_pipe" ]; + "param" = [ "fs" ]; + "procfs" = [ "once_cell" "itoa" "fs" ]; + "rustc-dep-of-std" = [ "core" "alloc" "compiler_builtins" "linux-raw-sys/rustc-dep-of-std" "bitflags/rustc-dep-of-std" ]; + "std" = [ "io-lifetimes" ]; + "use-libc" = [ "libc_errno" "libc" ]; + "use-libc-auxv" = [ "libc" ]; + }; + resolvedDefaultFeatures = [ "default" "fs" "io-lifetimes" "libc" "std" "termios" "use-libc-auxv" ]; + }; + "rustversion" = rec { + crateName = "rustversion"; + version = "1.0.12"; + edition = "2018"; + sha256 = "01mzns4b7vfcxsyf63ck68gachqcbqzsfs6iwzrv6j449p70hcjg"; + procMacro = true; + build = "build/build.rs"; + authors = [ + "David Tolnay <dtolnay@gmail.com>" + ]; + + }; + "rusty-fork" = rec { + crateName = "rusty-fork"; + version = "0.3.0"; + edition = "2018"; + sha256 = "0kxwq5c480gg6q0j3bg4zzyfh2kwmc3v2ba94jw8ncjc8mpcqgfb"; + authors = [ + "Jason Lingle" + ]; + dependencies = [ + { + name = "fnv"; + packageId = "fnv"; + } + { + name = "quick-error"; + packageId = "quick-error 1.2.3"; + } + { + name = "tempfile"; + packageId = "tempfile"; + } + { + name = "wait-timeout"; + packageId = "wait-timeout"; + optional = true; + } + ]; + features = { + "default" = [ "timeout" ]; + "timeout" = [ "wait-timeout" ]; + "wait-timeout" = [ "dep:wait-timeout" ]; + }; + resolvedDefaultFeatures = [ "timeout" "wait-timeout" ]; + }; + "rustyline" = rec { + crateName = "rustyline"; + version = "10.1.1"; + edition = "2018"; + sha256 = "1vvsd68cch0lpcg6mcwfvfdd6r4cxbwis3bf9443phzkqcr3rs61"; + authors = [ + "Katsu Kawakami <kkawa1570@gmail.com>" + ]; + dependencies = [ + { + name = "bitflags"; + packageId = "bitflags"; + } + { + name = "cfg-if"; + packageId = "cfg-if"; + } + { + name = "clipboard-win"; + packageId = "clipboard-win"; + target = { target, features }: (target."windows" or false); + } + { + name = "dirs-next"; + packageId = "dirs-next"; + optional = true; + } + { + name = "fd-lock"; + packageId = "fd-lock"; + } + { + name = "libc"; + packageId = "libc"; + } + { + name = "log"; + packageId = "log"; + } + 
{ + name = "memchr"; + packageId = "memchr"; + } + { + name = "nix"; + packageId = "nix"; + usesDefaultFeatures = false; + target = { target, features }: (target."unix" or false); + features = [ "fs" "ioctl" "poll" "signal" "term" ]; + } + { + name = "radix_trie"; + packageId = "radix_trie"; + optional = true; + } + { + name = "scopeguard"; + packageId = "scopeguard"; + target = { target, features }: (target."windows" or false); + } + { + name = "unicode-segmentation"; + packageId = "unicode-segmentation"; + } + { + name = "unicode-width"; + packageId = "unicode-width"; + } + { + name = "utf8parse"; + packageId = "utf8parse"; + target = { target, features }: (target."unix" or false); + } + { + name = "winapi"; + packageId = "winapi"; + target = { target, features }: (target."windows" or false); + features = [ "consoleapi" "handleapi" "synchapi" "minwindef" "processenv" "std" "winbase" "wincon" "winuser" ]; + } + ]; + features = { + "case_insensitive_history_search" = [ "regex" ]; + "custom-bindings" = [ "radix_trie" ]; + "default" = [ "custom-bindings" "with-dirs" ]; + "dirs-next" = [ "dep:dirs-next" ]; + "radix_trie" = [ "dep:radix_trie" ]; + "regex" = [ "dep:regex" ]; + "signal-hook" = [ "dep:signal-hook" ]; + "skim" = [ "dep:skim" ]; + "with-dirs" = [ "dirs-next" ]; + "with-fuzzy" = [ "skim" ]; + }; + resolvedDefaultFeatures = [ "custom-bindings" "default" "dirs-next" "radix_trie" "with-dirs" ]; + }; + "ryu" = rec { + crateName = "ryu"; + version = "1.0.13"; + edition = "2018"; + sha256 = "0hchlxvjmsz51l06c7r8zwj45pm8bhc3x3czcih27rkx8v03j4zr"; + authors = [ + "David Tolnay <dtolnay@gmail.com>" + ]; + features = { + "no-panic" = [ "dep:no-panic" ]; + }; + }; + "same-file" = rec { + crateName = "same-file"; + version = "1.0.6"; + edition = "2018"; + sha256 = "00h5j1w87dmhnvbv9l8bic3y7xxsnjmssvifw2ayvgx9mb1ivz4k"; + authors = [ + "Andrew Gallant <jamslam@gmail.com>" + ]; + dependencies = [ + { + name = "winapi-util"; + packageId = "winapi-util"; + target = { target, features }: (target."windows" or false); + } + ]; + + }; + "scopeguard" = rec { + crateName = "scopeguard"; + version = "1.1.0"; + edition = "2015"; + sha256 = "1kbqm85v43rq92vx7hfiay6pmcga03vrjbbfwqpyj3pwsg3b16nj"; + authors = [ + "bluss" + ]; + features = { + "default" = [ "use_std" ]; + }; + resolvedDefaultFeatures = [ "default" "use_std" ]; + }; + "serde" = rec { + crateName = "serde"; + version = "1.0.162"; + edition = "2015"; + sha256 = "1dksgs0zi9wdh3bm3gzzsvmgg39fn8vb4d8gbz09haswmghzdcki"; + authors = [ + "Erick Tryzelaar <erick.tryzelaar@gmail.com>" + "David Tolnay <dtolnay@gmail.com>" + ]; + dependencies = [ + { + name = "serde_derive"; + packageId = "serde_derive"; + optional = true; + } + ]; + devDependencies = [ + { + name = "serde_derive"; + packageId = "serde_derive"; + } + ]; + features = { + "default" = [ "std" ]; + "derive" = [ "serde_derive" ]; + "serde_derive" = [ "dep:serde_derive" ]; + }; + resolvedDefaultFeatures = [ "alloc" "default" "derive" "rc" "serde_derive" "std" ]; + }; + "serde_derive" = rec { + crateName = "serde_derive"; + version = "1.0.162"; + edition = "2015"; + sha256 = "1diwx4c86b63mgmzbd5nvj8imjwhipm48jlhi62bar7xa91q3852"; + procMacro = true; + authors = [ + "Erick Tryzelaar <erick.tryzelaar@gmail.com>" + "David Tolnay <dtolnay@gmail.com>" + ]; + dependencies = [ + { + name = "proc-macro2"; + packageId = "proc-macro2 1.0.56"; + } + { + name = "quote"; + packageId = "quote 1.0.26"; + } + { + name = "syn"; + packageId = "syn 2.0.15"; + } + ]; + features = { }; + resolvedDefaultFeatures = [ 
"default" ]; + }; + "serde_json" = rec { + crateName = "serde_json"; + version = "1.0.96"; + edition = "2018"; + sha256 = "1waj3qwpa610vmksnzcmkll6vaw7nf7v3ckj4v0wlfs0a153jz85"; + authors = [ + "Erick Tryzelaar <erick.tryzelaar@gmail.com>" + "David Tolnay <dtolnay@gmail.com>" + ]; + dependencies = [ + { + name = "itoa"; + packageId = "itoa"; + } + { + name = "ryu"; + packageId = "ryu"; + } + { + name = "serde"; + packageId = "serde"; + usesDefaultFeatures = false; + } + ]; + devDependencies = [ + { + name = "serde"; + packageId = "serde"; + features = [ "derive" ]; + } + ]; + features = { + "alloc" = [ "serde/alloc" ]; + "default" = [ "std" ]; + "indexmap" = [ "dep:indexmap" ]; + "preserve_order" = [ "indexmap" "std" ]; + "std" = [ "serde/std" ]; + }; + resolvedDefaultFeatures = [ "default" "std" ]; + }; + "serde_spanned" = rec { + crateName = "serde_spanned"; + version = "0.6.1"; + edition = "2021"; + sha256 = "1x7wqhqay4bgkaq8dmvb9z01mk2z0j0g9jl5nb9ynv3aanpqrz8f"; + dependencies = [ + { + name = "serde"; + packageId = "serde"; + optional = true; + } + ]; + features = { + "serde" = [ "dep:serde" ]; + }; + resolvedDefaultFeatures = [ "serde" ]; + }; + "sha-1" = rec { + crateName = "sha-1"; + version = "0.8.2"; + edition = "2015"; + sha256 = "1pv387q0r7llk2cqzyq0nivzvkgqgzsiygqzlv7b68z9xl5lvngp"; + libName = "sha1"; + authors = [ + "RustCrypto Developers" + ]; + dependencies = [ + { + name = "block-buffer"; + packageId = "block-buffer 0.7.3"; + } + { + name = "digest"; + packageId = "digest 0.8.1"; + } + { + name = "fake-simd"; + packageId = "fake-simd"; + } + { + name = "opaque-debug"; + packageId = "opaque-debug"; + } + ]; + devDependencies = [ + { + name = "digest"; + packageId = "digest 0.8.1"; + features = [ "dev" ]; + } + ]; + features = { + "asm" = [ "sha1-asm" ]; + "asm-aarch64" = [ "asm" "libc" ]; + "default" = [ "std" ]; + "libc" = [ "dep:libc" ]; + "sha1-asm" = [ "dep:sha1-asm" ]; + "std" = [ "digest/std" ]; + }; + resolvedDefaultFeatures = [ "default" "std" ]; + }; + "sha2 0.10.6" = rec { + crateName = "sha2"; + version = "0.10.6"; + edition = "2018"; + sha256 = "1h5xrrv2y06kr1gsz4pwrm3lsp206nm2gjxgbf21wfrfzsavgrl2"; + authors = [ + "RustCrypto Developers" + ]; + dependencies = [ + { + name = "cfg-if"; + packageId = "cfg-if"; + } + { + name = "cpufeatures"; + packageId = "cpufeatures"; + target = { target, features }: (("aarch64" == target."arch") || ("x86_64" == target."arch") || ("x86" == target."arch")); + } + { + name = "digest"; + packageId = "digest 0.10.6"; + } + ]; + devDependencies = [ + { + name = "digest"; + packageId = "digest 0.10.6"; + features = [ "dev" ]; + } + ]; + features = { + "asm" = [ "sha2-asm" ]; + "asm-aarch64" = [ "asm" ]; + "default" = [ "std" ]; + "oid" = [ "digest/oid" ]; + "sha2-asm" = [ "dep:sha2-asm" ]; + "std" = [ "digest/std" ]; + }; + resolvedDefaultFeatures = [ "default" "std" ]; + }; + "sha2 0.8.2" = rec { + crateName = "sha2"; + version = "0.8.2"; + edition = "2015"; + sha256 = "0s9yddvyg6anaikdl86wmwfim25c0d4m0xq0y2ghs34alxpg8mm2"; + authors = [ + "RustCrypto Developers" + ]; + dependencies = [ + { + name = "block-buffer"; + packageId = "block-buffer 0.7.3"; + } + { + name = "digest"; + packageId = "digest 0.8.1"; + } + { + name = "fake-simd"; + packageId = "fake-simd"; + } + { + name = "opaque-debug"; + packageId = "opaque-debug"; + } + ]; + devDependencies = [ + { + name = "digest"; + packageId = "digest 0.8.1"; + features = [ "dev" ]; + } + ]; + features = { + "asm" = [ "sha2-asm" ]; + "asm-aarch64" = [ "asm" "libc" ]; + "default" = [ 
"std" ]; + "libc" = [ "dep:libc" ]; + "sha2-asm" = [ "dep:sha2-asm" ]; + "std" = [ "digest/std" ]; + }; + resolvedDefaultFeatures = [ "default" "std" ]; + }; + "sharded-slab" = rec { + crateName = "sharded-slab"; + version = "0.1.4"; + edition = "2018"; + sha256 = "0cbb8kgwsyr3zzhsv8jrs3y1j3vsw4jxil42lfq31ikhdy0bl3wh"; + authors = [ + "Eliza Weisman <eliza@buoyant.io>" + ]; + dependencies = [ + { + name = "lazy_static"; + packageId = "lazy_static"; + } + ]; + features = { + "loom" = [ "dep:loom" ]; + }; + }; + "slab" = rec { + crateName = "slab"; + version = "0.4.8"; + edition = "2018"; + sha256 = "0bgwxig8gkqp6gz8rvrpdj6qwa10karnsxwx7wsj5ay8kcf3aa35"; + authors = [ + "Carl Lerche <me@carllerche.com>" + ]; + buildDependencies = [ + { + name = "autocfg"; + packageId = "autocfg"; + } + ]; + features = { + "default" = [ "std" ]; + "serde" = [ "dep:serde" ]; + }; + resolvedDefaultFeatures = [ "default" "std" ]; + }; + "sled" = rec { + crateName = "sled"; + version = "0.34.7"; + edition = "2018"; + sha256 = "0dcr2s7cylj5mb33ci3kpx7fz797jwvysnl5airrir9cgirv95kz"; + authors = [ + "Tyler Neely <t@jujit.su>" + ]; + dependencies = [ + { + name = "crc32fast"; + packageId = "crc32fast"; + } + { + name = "crossbeam-epoch"; + packageId = "crossbeam-epoch"; + } + { + name = "crossbeam-utils"; + packageId = "crossbeam-utils"; + } + { + name = "fs2"; + packageId = "fs2"; + target = { target, features }: (("linux" == target."os") || ("macos" == target."os") || ("windows" == target."os")); + } + { + name = "fxhash"; + packageId = "fxhash"; + } + { + name = "libc"; + packageId = "libc"; + } + { + name = "log"; + packageId = "log"; + } + { + name = "parking_lot"; + packageId = "parking_lot"; + } + { + name = "zstd"; + packageId = "zstd"; + optional = true; + } + ]; + devDependencies = [ + { + name = "log"; + packageId = "log"; + } + ]; + features = { + "backtrace" = [ "dep:backtrace" ]; + "color-backtrace" = [ "dep:color-backtrace" ]; + "compression" = [ "zstd" ]; + "default" = [ "no_metrics" ]; + "io_uring" = [ "rio" ]; + "no_logs" = [ "log/max_level_off" ]; + "pretty_backtrace" = [ "color-backtrace" ]; + "rio" = [ "dep:rio" ]; + "testing" = [ "event_log" "lock_free_delays" "compression" "failpoints" "backtrace" ]; + "zstd" = [ "dep:zstd" ]; + }; + resolvedDefaultFeatures = [ "compression" "default" "no_metrics" "zstd" ]; + }; + "smallvec" = rec { + crateName = "smallvec"; + version = "1.10.0"; + edition = "2018"; + sha256 = "1q2k15fzxgwjpcdv3f323w24rbbfyv711ayz85ila12lg7zbw1x5"; + authors = [ + "The Servo Project Developers" + ]; + features = { + "arbitrary" = [ "dep:arbitrary" ]; + "const_new" = [ "const_generics" ]; + "serde" = [ "dep:serde" ]; + }; + }; + "smol_str" = rec { + crateName = "smol_str"; + version = "0.2.0"; + edition = "2018"; + sha256 = "1779hpx5ipbcvkdj5zw8zqk3ynn160qvls1gkcr54hwsprmjw8bl"; + authors = [ + "Aleksey Kladov <aleksey.kladov@gmail.com>" + ]; + dependencies = [ + { + name = "serde"; + packageId = "serde"; + optional = true; + usesDefaultFeatures = false; + } + ]; + devDependencies = [ + { + name = "serde"; + packageId = "serde"; + features = [ "derive" ]; + } + ]; + features = { + "arbitrary" = [ "dep:arbitrary" ]; + "default" = [ "std" ]; + "serde" = [ "dep:serde" ]; + "std" = [ "serde?/std" ]; + }; + resolvedDefaultFeatures = [ "default" "std" ]; + }; + "socket2" = rec { + crateName = "socket2"; + version = "0.4.9"; + edition = "2018"; + sha256 = "0qnn1r41jqj20m0a2nzzjgzndlmpg5maiyjchccaypfqxq8sk934"; + authors = [ + "Alex Crichton <alex@alexcrichton.com>" + "Thomas de Zeeuw 
<thomasdezeeuw@gmail.com>" + ]; + dependencies = [ + { + name = "libc"; + packageId = "libc"; + target = { target, features }: (target."unix" or false); + } + { + name = "winapi"; + packageId = "winapi"; + target = { target, features }: (target."windows" or false); + features = [ "handleapi" "ws2ipdef" "ws2tcpip" ]; + } + ]; + features = { }; + resolvedDefaultFeatures = [ "all" ]; + }; + "ssri" = rec { + crateName = "ssri"; + version = "7.0.0"; + edition = "2018"; + sha256 = "1wi3yk801a0bgkd51ly83dxzjfq5726hwq5asxwvx7zki39w1km9"; + authors = [ + "Kat Marchán <kzm@zkat.tech>" + ]; + dependencies = [ + { + name = "base64"; + packageId = "base64 0.10.1"; + } + { + name = "digest"; + packageId = "digest 0.8.1"; + } + { + name = "hex"; + packageId = "hex"; + } + { + name = "serde"; + packageId = "serde"; + optional = true; + } + { + name = "sha-1"; + packageId = "sha-1"; + } + { + name = "sha2"; + packageId = "sha2 0.8.2"; + } + { + name = "thiserror"; + packageId = "thiserror"; + } + ]; + features = { + "default" = [ "serde" ]; + "serde" = [ "dep:serde" ]; + }; + resolvedDefaultFeatures = [ "default" "serde" ]; + }; + "static_assertions" = rec { + crateName = "static_assertions"; + version = "1.1.0"; + edition = "2015"; + sha256 = "0gsl6xmw10gvn3zs1rv99laj5ig7ylffnh71f9l34js4nr4r7sx2"; + authors = [ + "Nikolai Vazquez" + ]; + features = { }; + }; + "str-buf" = rec { + crateName = "str-buf"; + version = "1.0.6"; + edition = "2018"; + sha256 = "1l7q4nha7wpsr0970bfqm773vhmpwr9l6rr8r4gwgrh46wvdh24y"; + authors = [ + "Douman <douman@gmx.se>" + ]; + features = { + "serde" = [ "dep:serde" ]; + }; + }; + "strsim" = rec { + crateName = "strsim"; + version = "0.10.0"; + edition = "2015"; + sha256 = "08s69r4rcrahwnickvi0kq49z524ci50capybln83mg6b473qivk"; + authors = [ + "Danny Guo <danny@dannyguo.com>" + ]; + + }; + "structmeta" = rec { + crateName = "structmeta"; + version = "0.1.6"; + edition = "2021"; + sha256 = "0alyl12b7fab8izrpliil73sxs1ivr5vm0pisallmxlb4zb44j0h"; + authors = [ + "frozenlib" + ]; + dependencies = [ + { + name = "proc-macro2"; + packageId = "proc-macro2 1.0.56"; + } + { + name = "quote"; + packageId = "quote 1.0.26"; + } + { + name = "structmeta-derive"; + packageId = "structmeta-derive"; + } + { + name = "syn"; + packageId = "syn 1.0.109"; + } + ]; + devDependencies = [ + { + name = "syn"; + packageId = "syn 1.0.109"; + features = [ "extra-traits" "full" ]; + } + ]; + + }; + "structmeta-derive" = rec { + crateName = "structmeta-derive"; + version = "0.1.6"; + edition = "2021"; + sha256 = "14vxik2m3dm7bwx016qfz062fwznkbq02fyq8vby545m0pj0nhi4"; + procMacro = true; + authors = [ + "frozenlib" + ]; + dependencies = [ + { + name = "proc-macro2"; + packageId = "proc-macro2 1.0.56"; + } + { + name = "quote"; + packageId = "quote 1.0.26"; + } + { + name = "syn"; + packageId = "syn 1.0.109"; + } + ]; + devDependencies = [ + { + name = "syn"; + packageId = "syn 1.0.109"; + features = [ "extra-traits" ]; + } + ]; + + }; + "subtle" = rec { + crateName = "subtle"; + version = "2.4.1"; + edition = "2015"; + sha256 = "00b6jzh9gzb0h9n25g06nqr90z3xzqppfhhb260s1hjhh4pg7pkb"; + authors = [ + "Isis Lovecruft <isis@patternsinthevoid.net>" + "Henry de Valence <hdevalence@hdevalence.ca>" + ]; + features = { + "default" = [ "std" "i128" ]; + }; + }; + "syn 0.15.44" = rec { + crateName = "syn"; + version = "0.15.44"; + edition = "2015"; + sha256 = "1id5g6x6zihv3j7hwrw3m1jp636bg8dpi671r7zy3jvpkavb794w"; + authors = [ + "David Tolnay <dtolnay@gmail.com>" + ]; + dependencies = [ + { + name = "proc-macro2"; + 
packageId = "proc-macro2 0.4.30"; + usesDefaultFeatures = false; + } + { + name = "quote"; + packageId = "quote 0.6.13"; + optional = true; + usesDefaultFeatures = false; + } + { + name = "unicode-xid"; + packageId = "unicode-xid"; + } + ]; + features = { + "default" = [ "derive" "parsing" "printing" "clone-impls" "proc-macro" ]; + "printing" = [ "quote" ]; + "proc-macro" = [ "proc-macro2/proc-macro" "quote/proc-macro" ]; + "quote" = [ "dep:quote" ]; + }; + resolvedDefaultFeatures = [ "clone-impls" "default" "derive" "full" "parsing" "printing" "proc-macro" "quote" ]; + }; + "syn 1.0.109" = rec { + crateName = "syn"; + version = "1.0.109"; + edition = "2018"; + sha256 = "0ds2if4600bd59wsv7jjgfkayfzy3hnazs394kz6zdkmna8l3dkj"; + authors = [ + "David Tolnay <dtolnay@gmail.com>" + ]; + dependencies = [ + { + name = "proc-macro2"; + packageId = "proc-macro2 1.0.56"; + usesDefaultFeatures = false; + } + { + name = "quote"; + packageId = "quote 1.0.26"; + optional = true; + usesDefaultFeatures = false; + } + { + name = "unicode-ident"; + packageId = "unicode-ident"; + } + ]; + features = { + "default" = [ "derive" "parsing" "printing" "clone-impls" "proc-macro" ]; + "printing" = [ "quote" ]; + "proc-macro" = [ "proc-macro2/proc-macro" "quote/proc-macro" ]; + "quote" = [ "dep:quote" ]; + "test" = [ "syn-test-suite/all-features" ]; + }; + resolvedDefaultFeatures = [ "clone-impls" "default" "derive" "extra-traits" "full" "parsing" "printing" "proc-macro" "quote" "visit" "visit-mut" ]; + }; + "syn 2.0.15" = rec { + crateName = "syn"; + version = "2.0.15"; + edition = "2021"; + sha256 = "08n8c235bj7f86a5jg561s5zjfijdn8jw6ih2im7xxb0iczcykx3"; + authors = [ + "David Tolnay <dtolnay@gmail.com>" + ]; + dependencies = [ + { + name = "proc-macro2"; + packageId = "proc-macro2 1.0.56"; + usesDefaultFeatures = false; + } + { + name = "quote"; + packageId = "quote 1.0.26"; + optional = true; + usesDefaultFeatures = false; + } + { + name = "unicode-ident"; + packageId = "unicode-ident"; + } + ]; + features = { + "default" = [ "derive" "parsing" "printing" "clone-impls" "proc-macro" ]; + "printing" = [ "quote" ]; + "proc-macro" = [ "proc-macro2/proc-macro" "quote/proc-macro" ]; + "quote" = [ "dep:quote" ]; + "test" = [ "syn-test-suite/all-features" ]; + }; + resolvedDefaultFeatures = [ "clone-impls" "default" "derive" "extra-traits" "full" "parsing" "printing" "proc-macro" "quote" "visit-mut" ]; + }; + "sync_wrapper" = rec { + crateName = "sync_wrapper"; + version = "0.1.2"; + edition = "2018"; + sha256 = "0q01lyj0gr9a93n10nxsn8lwbzq97jqd6b768x17c8f7v7gccir0"; + authors = [ + "Actyx AG <developer@actyx.io>" + ]; + features = { + "futures" = [ "futures-core" ]; + "futures-core" = [ "dep:futures-core" ]; + }; + }; + "tabwriter" = rec { + crateName = "tabwriter"; + version = "1.2.1"; + edition = "2018"; + sha256 = "048i0mj3b07zlry9m5fl706y5bzdzgrswymqn32drakzk7y5q81n"; + authors = [ + "Andrew Gallant <jamslam@gmail.com>" + ]; + dependencies = [ + { + name = "unicode-width"; + packageId = "unicode-width"; + } + ]; + features = { + "ansi_formatting" = [ "regex" "lazy_static" ]; + "lazy_static" = [ "dep:lazy_static" ]; + "regex" = [ "dep:regex" ]; + }; + resolvedDefaultFeatures = [ "default" ]; + }; + "tempdir" = rec { + crateName = "tempdir"; + version = "0.3.7"; + edition = "2015"; + sha256 = "1n5n86zxpgd85y0mswrp5cfdisizq2rv3la906g6ipyc03xvbwhm"; + authors = [ + "The Rust Project Developers" + ]; + dependencies = [ + { + name = "rand"; + packageId = "rand 0.4.6"; + } + { + name = "remove_dir_all"; + packageId = 
"remove_dir_all"; + } + ]; + + }; + "tempfile" = rec { + crateName = "tempfile"; + version = "3.5.0"; + edition = "2018"; + sha256 = "163rp254r3x7i5hisagrpxid2166pq94jvk511dpkmc1yf2fryxr"; + authors = [ + "Steven Allen <steven@stebalien.com>" + "The Rust Project Developers" + "Ashley Mannix <ashleymannix@live.com.au>" + "Jason White <me@jasonwhite.io>" + ]; + dependencies = [ + { + name = "cfg-if"; + packageId = "cfg-if"; + } + { + name = "fastrand"; + packageId = "fastrand"; + } + { + name = "redox_syscall"; + packageId = "redox_syscall 0.3.5"; + target = { target, features }: ("redox" == target."os"); + } + { + name = "rustix"; + packageId = "rustix"; + target = { target, features }: ((target."unix" or false) || ("wasi" == target."os")); + features = [ "fs" ]; + } + { + name = "windows-sys"; + packageId = "windows-sys 0.45.0"; + target = { target, features }: (target."windows" or false); + features = [ "Win32_Storage_FileSystem" "Win32_Foundation" ]; + } + ]; + features = { }; + }; + "termcolor" = rec { + crateName = "termcolor"; + version = "1.2.0"; + edition = "2018"; + sha256 = "1dmrbsljxpfng905qkaxljlwjhv8h0i3969cbiv5rb7y8a4wymdy"; + authors = [ + "Andrew Gallant <jamslam@gmail.com>" + ]; + dependencies = [ + { + name = "winapi-util"; + packageId = "winapi-util"; + target = { target, features }: (target."windows" or false); + } + ]; + + }; + "test-case" = rec { + crateName = "test-case"; + version = "2.2.2"; + edition = "2018"; + sha256 = "1h4qymhy332lzgg79w696qfxg6wdab5birn8xvfgkczzgmdczmi1"; + authors = [ + "Marcin Sas-Szymanski <marcin.sas-szymanski@anixe.pl>" + "Wojciech Polak <frondeus@gmail.com>" + "Łukasz Biel <lukasz.p.biel@gmail.com>" + ]; + dependencies = [ + { + name = "test-case-macros"; + packageId = "test-case-macros"; + usesDefaultFeatures = false; + } + ]; + features = { + "regex" = [ "dep:regex" ]; + "with-regex" = [ "regex" "test-case-macros/with-regex" ]; + }; + }; + "test-case-macros" = rec { + crateName = "test-case-macros"; + version = "2.2.2"; + edition = "2018"; + sha256 = "09jvbfvz48v6ya3i25gp3lbr6ym1fz7qyp3l6bcdslwkw7v7nnz4"; + procMacro = true; + authors = [ + "Marcin Sas-Szymanski <marcin.sas-szymanski@anixe.pl>" + "Wojciech Polak <frondeus@gmail.com>" + "Łukasz Biel <lukasz.p.biel@gmail.com>" + ]; + dependencies = [ + { + name = "cfg-if"; + packageId = "cfg-if"; + } + { + name = "proc-macro-error"; + packageId = "proc-macro-error"; + } + { + name = "proc-macro2"; + packageId = "proc-macro2 1.0.56"; + } + { + name = "quote"; + packageId = "quote 1.0.26"; + } + { + name = "syn"; + packageId = "syn 1.0.109"; + features = [ "full" "extra-traits" ]; + } + ]; + features = { }; + }; + "test-generator" = rec { + crateName = "test-generator"; + version = "0.3.0"; + edition = "2018"; + workspace_member = null; + src = pkgs.fetchgit { + url = "https://github.com/JamesGuthrie/test-generator.git"; + rev = "82e799979980962aec1aa324ec6e0e4cad781f41"; + sha256 = "08brp3qqa55hijc7xby3lam2cc84hvx1zzfqv6lj7smlczh8k32y"; + }; + procMacro = true; + authors = [ + "Frank Rehberger <frehberg@gmail.com>" + ]; + dependencies = [ + { + name = "glob"; + packageId = "glob"; + } + { + name = "proc-macro2"; + packageId = "proc-macro2 0.4.30"; + } + { + name = "quote"; + packageId = "quote 0.6.13"; + } + { + name = "syn"; + packageId = "syn 0.15.44"; + features = [ "full" ]; + } + ]; + + }; + "test-strategy" = rec { + crateName = "test-strategy"; + version = "0.2.1"; + edition = "2021"; + sha256 = "105lxqs0vnqff5821sgns8q1scvrwfx1yw6iz7i7nr862j6l1mk2"; + procMacro = true; + authors = [ 
+ "frozenlib" + ]; + dependencies = [ + { + name = "proc-macro2"; + packageId = "proc-macro2 1.0.56"; + } + { + name = "quote"; + packageId = "quote 1.0.26"; + } + { + name = "structmeta"; + packageId = "structmeta"; + } + { + name = "syn"; + packageId = "syn 1.0.109"; + features = [ "visit" "full" ]; + } + ]; + + }; + "text-size" = rec { + crateName = "text-size"; + version = "1.1.0"; + edition = "2018"; + sha256 = "02jn26l5wcdjqpy80ycnk9ha10flyc0p4yga8ci6aaz7vd4bb318"; + authors = [ + "Aleksey Kladov <aleksey.kladov@gmail.com>" + "Christopher Durham (CAD97) <cad97@cad97.com>" + ]; + features = { + "serde" = [ "dep:serde" ]; + }; + }; + "textwrap" = rec { + crateName = "textwrap"; + version = "0.16.0"; + edition = "2021"; + sha256 = "0gbwkjf15l6p3x2rkr75fa4cpcs1ly4c8pmlfx5bl6zybcm24ai2"; + authors = [ + "Martin Geisler <martin@geisler.net>" + ]; + features = { + "default" = [ "unicode-linebreak" "unicode-width" "smawk" ]; + "hyphenation" = [ "dep:hyphenation" ]; + "smawk" = [ "dep:smawk" ]; + "terminal_size" = [ "dep:terminal_size" ]; + "unicode-linebreak" = [ "dep:unicode-linebreak" ]; + "unicode-width" = [ "dep:unicode-width" ]; + }; + }; + "thiserror" = rec { + crateName = "thiserror"; + version = "1.0.40"; + edition = "2018"; + sha256 = "1b7bdhriasdsr99y39d50jz995xaz9sw3hsbb6z9kp6q9cqrm34p"; + authors = [ + "David Tolnay <dtolnay@gmail.com>" + ]; + dependencies = [ + { + name = "thiserror-impl"; + packageId = "thiserror-impl"; + } + ]; + + }; + "thiserror-impl" = rec { + crateName = "thiserror-impl"; + version = "1.0.40"; + edition = "2018"; + sha256 = "17sn41kyimc6s983aypkk6a45pcyrkbkvrw6rp407n5hqm16ligr"; + procMacro = true; + authors = [ + "David Tolnay <dtolnay@gmail.com>" + ]; + dependencies = [ + { + name = "proc-macro2"; + packageId = "proc-macro2 1.0.56"; + } + { + name = "quote"; + packageId = "quote 1.0.26"; + } + { + name = "syn"; + packageId = "syn 2.0.15"; + } + ]; + + }; + "thread_local" = rec { + crateName = "thread_local"; + version = "1.1.7"; + edition = "2021"; + sha256 = "0lp19jdgvp5m4l60cgxdnl00yw1hlqy8gcywg9bddwng9h36zp9z"; + authors = [ + "Amanieu d'Antras <amanieu@gmail.com>" + ]; + dependencies = [ + { + name = "cfg-if"; + packageId = "cfg-if"; + } + { + name = "once_cell"; + packageId = "once_cell"; + } + ]; + features = { }; + }; + "tinytemplate" = rec { + crateName = "tinytemplate"; + version = "1.2.1"; + edition = "2015"; + sha256 = "1g5n77cqkdh9hy75zdb01adxn45mkh9y40wdr7l68xpz35gnnkdy"; + authors = [ + "Brook Heisler <brookheisler@gmail.com>" + ]; + dependencies = [ + { + name = "serde"; + packageId = "serde"; + } + { + name = "serde_json"; + packageId = "serde_json"; + } + ]; + + }; + "tokio" = rec { + crateName = "tokio"; + version = "1.28.0"; + edition = "2021"; + sha256 = "0vqk7dkmvadzqrxwlgja04wlf4s8iymjk6yvcshs7r9lh6zqdiy3"; + authors = [ + "Tokio Contributors <team@tokio.rs>" + ]; + dependencies = [ + { + name = "bytes"; + packageId = "bytes"; + optional = true; + } + { + name = "libc"; + packageId = "libc"; + optional = true; + target = { target, features }: (target."unix" or false); + } + { + name = "mio"; + packageId = "mio"; + optional = true; + } + { + name = "num_cpus"; + packageId = "num_cpus"; + optional = true; + } + { + name = "pin-project-lite"; + packageId = "pin-project-lite"; + } + { + name = "socket2"; + packageId = "socket2"; + optional = true; + target = { target, features }: (!(("wasm32" == target."arch") || ("wasm64" == target."arch"))); + features = [ "all" ]; + } + { + name = "tokio-macros"; + packageId = "tokio-macros"; + 
optional = true; + } + { + name = "windows-sys"; + packageId = "windows-sys 0.48.0"; + target = { target, features }: (target."docsrs" or false); + features = [ "Win32_Foundation" "Win32_Security_Authorization" ]; + } + { + name = "windows-sys"; + packageId = "windows-sys 0.48.0"; + optional = true; + target = { target, features }: (target."windows" or false); + } + ]; + buildDependencies = [ + { + name = "autocfg"; + packageId = "autocfg"; + } + ]; + devDependencies = [ + { + name = "libc"; + packageId = "libc"; + target = { target, features }: (target."unix" or false); + } + { + name = "socket2"; + packageId = "socket2"; + target = { target, features }: (!(("wasm32" == target."arch") || ("wasm64" == target."arch"))); + } + ]; + features = { + "bytes" = [ "dep:bytes" ]; + "full" = [ "fs" "io-util" "io-std" "macros" "net" "parking_lot" "process" "rt" "rt-multi-thread" "signal" "sync" "time" ]; + "io-util" = [ "bytes" ]; + "libc" = [ "dep:libc" ]; + "macros" = [ "tokio-macros" ]; + "mio" = [ "dep:mio" ]; + "net" = [ "libc" "mio/os-poll" "mio/os-ext" "mio/net" "socket2" "windows-sys/Win32_Foundation" "windows-sys/Win32_Security" "windows-sys/Win32_Storage_FileSystem" "windows-sys/Win32_System_Pipes" "windows-sys/Win32_System_SystemServices" ]; + "num_cpus" = [ "dep:num_cpus" ]; + "parking_lot" = [ "dep:parking_lot" ]; + "process" = [ "bytes" "libc" "mio/os-poll" "mio/os-ext" "mio/net" "signal-hook-registry" "windows-sys/Win32_Foundation" "windows-sys/Win32_System_Threading" "windows-sys/Win32_System_WindowsProgramming" ]; + "rt-multi-thread" = [ "num_cpus" "rt" ]; + "signal" = [ "libc" "mio/os-poll" "mio/net" "mio/os-ext" "signal-hook-registry" "windows-sys/Win32_Foundation" "windows-sys/Win32_System_Console" ]; + "signal-hook-registry" = [ "dep:signal-hook-registry" ]; + "socket2" = [ "dep:socket2" ]; + "test-util" = [ "rt" "sync" "time" ]; + "tokio-macros" = [ "dep:tokio-macros" ]; + "tracing" = [ "dep:tracing" ]; + "windows-sys" = [ "dep:windows-sys" ]; + }; + resolvedDefaultFeatures = [ "bytes" "default" "io-std" "io-util" "libc" "macros" "mio" "net" "num_cpus" "rt" "rt-multi-thread" "socket2" "sync" "time" "tokio-macros" "windows-sys" ]; + }; + "tokio-io-timeout" = rec { + crateName = "tokio-io-timeout"; + version = "1.2.0"; + edition = "2018"; + sha256 = "1gx84f92q1491vj4pkn81j8pz1s3pgwnbrsdhfsa2556mli41drh"; + authors = [ + "Steven Fackler <sfackler@gmail.com>" + ]; + dependencies = [ + { + name = "pin-project-lite"; + packageId = "pin-project-lite"; + } + { + name = "tokio"; + packageId = "tokio"; + features = [ "time" ]; + } + ]; + devDependencies = [ + { + name = "tokio"; + packageId = "tokio"; + features = [ "full" ]; + } + ]; + + }; + "tokio-macros" = rec { + crateName = "tokio-macros"; + version = "2.1.0"; + edition = "2018"; + sha256 = "0pk7y9dfanab886iaqwcbri39jkw33kgl7y07v0kg1pp8prdq2v3"; + procMacro = true; + authors = [ + "Tokio Contributors <team@tokio.rs>" + ]; + dependencies = [ + { + name = "proc-macro2"; + packageId = "proc-macro2 1.0.56"; + } + { + name = "quote"; + packageId = "quote 1.0.26"; + } + { + name = "syn"; + packageId = "syn 2.0.15"; + features = [ "full" ]; + } + ]; + + }; + "tokio-stream" = rec { + crateName = "tokio-stream"; + version = "0.1.14"; + edition = "2021"; + sha256 = "0hi8hcwavh5sdi1ivc9qc4yvyr32f153c212dpd7sb366y6rhz1r"; + authors = [ + "Tokio Contributors <team@tokio.rs>" + ]; + dependencies = [ + { + name = "futures-core"; + packageId = "futures-core"; + } + { + name = "pin-project-lite"; + packageId = "pin-project-lite"; + } + { + name = 
"tokio"; + packageId = "tokio"; + features = [ "sync" ]; + } + ]; + devDependencies = [ + { + name = "tokio"; + packageId = "tokio"; + features = [ "full" "test-util" ]; + } + ]; + features = { + "default" = [ "time" ]; + "fs" = [ "tokio/fs" ]; + "full" = [ "time" "net" "io-util" "fs" "sync" "signal" ]; + "io-util" = [ "tokio/io-util" ]; + "net" = [ "tokio/net" ]; + "signal" = [ "tokio/signal" ]; + "sync" = [ "tokio/sync" "tokio-util" ]; + "time" = [ "tokio/time" ]; + "tokio-util" = [ "dep:tokio-util" ]; + }; + resolvedDefaultFeatures = [ "default" "net" "time" ]; + }; + "tokio-util" = rec { + crateName = "tokio-util"; + version = "0.7.8"; + edition = "2021"; + sha256 = "0zfvblvskaj65rianw6agz9hnbdbsfgf5fkw4srwik3yr31fhvw0"; + authors = [ + "Tokio Contributors <team@tokio.rs>" + ]; + dependencies = [ + { + name = "bytes"; + packageId = "bytes"; + } + { + name = "futures-core"; + packageId = "futures-core"; + } + { + name = "futures-sink"; + packageId = "futures-sink"; + } + { + name = "pin-project-lite"; + packageId = "pin-project-lite"; + } + { + name = "tokio"; + packageId = "tokio"; + features = [ "sync" ]; + } + { + name = "tracing"; + packageId = "tracing"; + optional = true; + usesDefaultFeatures = false; + features = [ "std" ]; + } + ]; + devDependencies = [ + { + name = "tokio"; + packageId = "tokio"; + features = [ "full" ]; + } + ]; + features = { + "__docs_rs" = [ "futures-util" ]; + "codec" = [ "tracing" ]; + "compat" = [ "futures-io" ]; + "full" = [ "codec" "compat" "io-util" "time" "net" "rt" ]; + "futures-io" = [ "dep:futures-io" ]; + "futures-util" = [ "dep:futures-util" ]; + "hashbrown" = [ "dep:hashbrown" ]; + "io-util" = [ "io" "tokio/rt" "tokio/io-util" ]; + "net" = [ "tokio/net" ]; + "rt" = [ "tokio/rt" "tokio/sync" "futures-util" "hashbrown" ]; + "slab" = [ "dep:slab" ]; + "time" = [ "tokio/time" "slab" ]; + "tracing" = [ "dep:tracing" ]; + }; + resolvedDefaultFeatures = [ "codec" "default" "io" "io-util" "tracing" ]; + }; + "toml" = rec { + crateName = "toml"; + version = "0.6.0"; + edition = "2021"; + sha256 = "05zjz69wjymp9yrgccg5vhvxpf855rgn23vl1yvri4nwwj8difag"; + authors = [ + "Alex Crichton <alex@alexcrichton.com>" + ]; + dependencies = [ + { + name = "serde"; + packageId = "serde"; + } + { + name = "serde_spanned"; + packageId = "serde_spanned"; + features = [ "serde" ]; + } + { + name = "toml_datetime"; + packageId = "toml_datetime"; + features = [ "serde" ]; + } + { + name = "toml_edit"; + packageId = "toml_edit"; + optional = true; + features = [ "serde" ]; + } + ]; + devDependencies = [ + { + name = "serde"; + packageId = "serde"; + features = [ "derive" ]; + } + ]; + features = { + "default" = [ "parse" "display" ]; + "display" = [ "dep:toml_edit" ]; + "indexmap" = [ "dep:indexmap" ]; + "parse" = [ "dep:toml_edit" ]; + "preserve_order" = [ "indexmap" ]; + }; + resolvedDefaultFeatures = [ "default" "display" "parse" ]; + }; + "toml_datetime" = rec { + crateName = "toml_datetime"; + version = "0.5.1"; + edition = "2021"; + sha256 = "1xcw3kyklh3s2gxp65ma26rgkl7505la4xx1r55kfgcfmikz8ls5"; + authors = [ + "Alex Crichton <alex@alexcrichton.com>" + ]; + dependencies = [ + { + name = "serde"; + packageId = "serde"; + optional = true; + } + ]; + features = { + "serde" = [ "dep:serde" ]; + }; + resolvedDefaultFeatures = [ "serde" ]; + }; + "toml_edit" = rec { + crateName = "toml_edit"; + version = "0.18.1"; + edition = "2021"; + sha256 = "0ax1bwzd4xclpids3b69nd1nxqi3x3qa4ymz51jbrp6hsy6rvian"; + authors = [ + "Andronik Ordian <write@reusable.software>" + "Ed Page 
<eopage@gmail.com>" + ]; + dependencies = [ + { + name = "indexmap"; + packageId = "indexmap"; + } + { + name = "nom8"; + packageId = "nom8"; + } + { + name = "serde"; + packageId = "serde"; + optional = true; + } + { + name = "serde_spanned"; + packageId = "serde_spanned"; + optional = true; + features = [ "serde" ]; + } + { + name = "toml_datetime"; + packageId = "toml_datetime"; + } + ]; + features = { + "easy" = [ "serde" ]; + "perf" = [ "dep:kstring" ]; + "serde" = [ "dep:serde" "toml_datetime/serde" "dep:serde_spanned" ]; + }; + resolvedDefaultFeatures = [ "default" "serde" ]; + }; + "tonic" = rec { + crateName = "tonic"; + version = "0.8.3"; + edition = "2018"; + sha256 = "1yymp2xi1p60g81p5jfaybcawpfkb01vqvzqn4cyz6wj7fnry8cg"; + authors = [ + "Lucio Franco <luciofranco14@gmail.com>" + ]; + dependencies = [ + { + name = "async-stream"; + packageId = "async-stream"; + } + { + name = "async-trait"; + packageId = "async-trait"; + optional = true; + } + { + name = "axum"; + packageId = "axum"; + optional = true; + usesDefaultFeatures = false; + } + { + name = "base64"; + packageId = "base64 0.13.1"; + } + { + name = "bytes"; + packageId = "bytes"; + } + { + name = "futures-core"; + packageId = "futures-core"; + usesDefaultFeatures = false; + } + { + name = "futures-util"; + packageId = "futures-util"; + usesDefaultFeatures = false; + } + { + name = "h2"; + packageId = "h2"; + optional = true; + } + { + name = "http"; + packageId = "http"; + } + { + name = "http-body"; + packageId = "http-body"; + } + { + name = "hyper"; + packageId = "hyper"; + optional = true; + features = [ "full" ]; + } + { + name = "hyper-timeout"; + packageId = "hyper-timeout"; + optional = true; + } + { + name = "percent-encoding"; + packageId = "percent-encoding"; + } + { + name = "pin-project"; + packageId = "pin-project"; + } + { + name = "prost"; + packageId = "prost"; + rename = "prost1"; + optional = true; + } + { + name = "prost-derive"; + packageId = "prost-derive"; + optional = true; + } + { + name = "tokio"; + packageId = "tokio"; + optional = true; + features = [ "net" ]; + } + { + name = "tokio-stream"; + packageId = "tokio-stream"; + } + { + name = "tokio-util"; + packageId = "tokio-util"; + features = [ "codec" ]; + } + { + name = "tower"; + packageId = "tower"; + optional = true; + usesDefaultFeatures = false; + features = [ "balance" "buffer" "discover" "limit" "load" "make" "timeout" "util" ]; + } + { + name = "tower-layer"; + packageId = "tower-layer"; + } + { + name = "tower-service"; + packageId = "tower-service"; + } + { + name = "tracing"; + packageId = "tracing"; + } + { + name = "tracing-futures"; + packageId = "tracing-futures"; + optional = true; + } + ]; + devDependencies = [ + { + name = "tokio"; + packageId = "tokio"; + features = [ "rt" "macros" ]; + } + { + name = "tower"; + packageId = "tower"; + features = [ "full" ]; + } + ]; + features = { + "async-trait" = [ "dep:async-trait" ]; + "axum" = [ "dep:axum" ]; + "channel" = [ "h2" "hyper" "tokio" "tower" "tracing-futures" "tokio/macros" "tokio/time" "hyper-timeout" ]; + "codegen" = [ "async-trait" ]; + "default" = [ "transport" "codegen" "prost" ]; + "flate2" = [ "dep:flate2" ]; + "gzip" = [ "flate2" ]; + "h2" = [ "dep:h2" ]; + "hyper" = [ "dep:hyper" ]; + "hyper-timeout" = [ "dep:hyper-timeout" ]; + "prost" = [ "prost1" "prost-derive" ]; + "prost-derive" = [ "dep:prost-derive" ]; + "prost1" = [ "dep:prost1" ]; + "rustls-native-certs" = [ "dep:rustls-native-certs" ]; + "rustls-pemfile" = [ "dep:rustls-pemfile" ]; + "tls" = [ 
"rustls-pemfile" "transport" "tokio-rustls" ]; + "tls-roots" = [ "tls-roots-common" "rustls-native-certs" ]; + "tls-roots-common" = [ "tls" ]; + "tls-webpki-roots" = [ "tls-roots-common" "webpki-roots" ]; + "tokio" = [ "dep:tokio" ]; + "tokio-rustls" = [ "dep:tokio-rustls" ]; + "tower" = [ "dep:tower" ]; + "tracing-futures" = [ "dep:tracing-futures" ]; + "transport" = [ "axum" "channel" ]; + "webpki-roots" = [ "dep:webpki-roots" ]; + }; + resolvedDefaultFeatures = [ "async-trait" "axum" "channel" "codegen" "default" "h2" "hyper" "hyper-timeout" "prost" "prost-derive" "prost1" "tokio" "tower" "tracing-futures" "transport" ]; + }; + "tonic-build" = rec { + crateName = "tonic-build"; + version = "0.8.4"; + edition = "2018"; + sha256 = "1i781mfzcbzfk6fnf7qp95q6r0b05ixvmynw4z0agq7pq2wykxav"; + authors = [ + "Lucio Franco <luciofranco14@gmail.com>" + ]; + dependencies = [ + { + name = "prettyplease"; + packageId = "prettyplease"; + } + { + name = "proc-macro2"; + packageId = "proc-macro2 1.0.56"; + } + { + name = "prost-build"; + packageId = "prost-build"; + optional = true; + } + { + name = "quote"; + packageId = "quote 1.0.26"; + } + { + name = "syn"; + packageId = "syn 1.0.109"; + } + ]; + features = { + "cleanup-markdown" = [ "prost-build/cleanup-markdown" ]; + "default" = [ "transport" "prost" ]; + "prost" = [ "prost-build" ]; + "prost-build" = [ "dep:prost-build" ]; + }; + resolvedDefaultFeatures = [ "default" "prost" "prost-build" "transport" ]; + }; + "tonic-mock" = rec { + crateName = "tonic-mock"; + version = "0.1.0"; + edition = "2018"; + workspace_member = null; + src = pkgs.fetchgit { + url = "https://github.com/brainrake/tonic-mock"; + rev = "ec1a15510875de99d709d684190db5d9beab175e"; + sha256 = "0lwa03hpp0mxa6aa1zv5w68k61y4hccfm0q2ykyq392fwal8vb50"; + }; + authors = [ + "Tyr Chen <tyr.chen@gmail.com>" + ]; + dependencies = [ + { + name = "bytes"; + packageId = "bytes"; + } + { + name = "futures"; + packageId = "futures"; + } + { + name = "http"; + packageId = "http"; + } + { + name = "http-body"; + packageId = "http-body"; + } + { + name = "prost"; + packageId = "prost"; + } + { + name = "tonic"; + packageId = "tonic"; + } + ]; + + }; + "tonic-reflection" = rec { + crateName = "tonic-reflection"; + version = "0.5.0"; + edition = "2018"; + sha256 = "05i6c1fcbwkkj0p2r63vb5iac60b22a5rif3zx5li8a0slqgfm84"; + authors = [ + "James Nugent <james@jen20.com>" + "Samani G. 
Gikandi <samani@gojulas.com>" + ]; + dependencies = [ + { + name = "bytes"; + packageId = "bytes"; + } + { + name = "prost"; + packageId = "prost"; + } + { + name = "prost-types"; + packageId = "prost-types"; + } + { + name = "tokio"; + packageId = "tokio"; + features = [ "sync" ]; + } + { + name = "tokio-stream"; + packageId = "tokio-stream"; + features = [ "net" ]; + } + { + name = "tonic"; + packageId = "tonic"; + features = [ "codegen" "prost" ]; + } + ]; + buildDependencies = [ + { + name = "tonic-build"; + packageId = "tonic-build"; + features = [ "transport" "prost" ]; + } + ]; + + }; + "tower" = rec { + crateName = "tower"; + version = "0.4.13"; + edition = "2018"; + sha256 = "073wncyqav4sak1p755hf6vl66njgfc1z1g1di9rxx3cvvh9pymq"; + authors = [ + "Tower Maintainers <team@tower-rs.com>" + ]; + dependencies = [ + { + name = "futures-core"; + packageId = "futures-core"; + optional = true; + } + { + name = "futures-util"; + packageId = "futures-util"; + optional = true; + usesDefaultFeatures = false; + features = [ "alloc" ]; + } + { + name = "indexmap"; + packageId = "indexmap"; + optional = true; + } + { + name = "pin-project"; + packageId = "pin-project"; + optional = true; + } + { + name = "pin-project-lite"; + packageId = "pin-project-lite"; + optional = true; + } + { + name = "rand"; + packageId = "rand 0.8.5"; + optional = true; + features = [ "small_rng" ]; + } + { + name = "slab"; + packageId = "slab"; + optional = true; + } + { + name = "tokio"; + packageId = "tokio"; + optional = true; + features = [ "sync" ]; + } + { + name = "tokio-util"; + packageId = "tokio-util"; + optional = true; + usesDefaultFeatures = false; + } + { + name = "tower-layer"; + packageId = "tower-layer"; + } + { + name = "tower-service"; + packageId = "tower-service"; + } + { + name = "tracing"; + packageId = "tracing"; + optional = true; + usesDefaultFeatures = false; + features = [ "std" ]; + } + ]; + devDependencies = [ + { + name = "pin-project-lite"; + packageId = "pin-project-lite"; + } + { + name = "tokio"; + packageId = "tokio"; + features = [ "macros" "sync" "test-util" "rt-multi-thread" ]; + } + ]; + features = { + "__common" = [ "futures-core" "pin-project-lite" ]; + "balance" = [ "discover" "load" "ready-cache" "make" "rand" "slab" ]; + "buffer" = [ "__common" "tokio/sync" "tokio/rt" "tokio-util" "tracing" ]; + "default" = [ "log" ]; + "discover" = [ "__common" ]; + "filter" = [ "__common" "futures-util" ]; + "full" = [ "balance" "buffer" "discover" "filter" "hedge" "limit" "load" "load-shed" "make" "ready-cache" "reconnect" "retry" "spawn-ready" "steer" "timeout" "util" ]; + "futures-core" = [ "dep:futures-core" ]; + "futures-util" = [ "dep:futures-util" ]; + "hdrhistogram" = [ "dep:hdrhistogram" ]; + "hedge" = [ "util" "filter" "futures-util" "hdrhistogram" "tokio/time" "tracing" ]; + "indexmap" = [ "dep:indexmap" ]; + "limit" = [ "__common" "tokio/time" "tokio/sync" "tokio-util" "tracing" ]; + "load" = [ "__common" "tokio/time" "tracing" ]; + "load-shed" = [ "__common" ]; + "log" = [ "tracing/log" ]; + "make" = [ "futures-util" "pin-project-lite" "tokio/io-std" ]; + "pin-project" = [ "dep:pin-project" ]; + "pin-project-lite" = [ "dep:pin-project-lite" ]; + "rand" = [ "dep:rand" ]; + "ready-cache" = [ "futures-core" "futures-util" "indexmap" "tokio/sync" "tracing" "pin-project-lite" ]; + "reconnect" = [ "make" "tokio/io-std" "tracing" ]; + "retry" = [ "__common" "tokio/time" ]; + "slab" = [ "dep:slab" ]; + "spawn-ready" = [ "__common" "futures-util" "tokio/sync" "tokio/rt" "util" 
"tracing" ]; + "timeout" = [ "pin-project-lite" "tokio/time" ]; + "tokio" = [ "dep:tokio" ]; + "tokio-stream" = [ "dep:tokio-stream" ]; + "tokio-util" = [ "dep:tokio-util" ]; + "tracing" = [ "dep:tracing" ]; + "util" = [ "__common" "futures-util" "pin-project" ]; + }; + resolvedDefaultFeatures = [ "__common" "balance" "buffer" "default" "discover" "futures-core" "futures-util" "indexmap" "limit" "load" "log" "make" "pin-project" "pin-project-lite" "rand" "ready-cache" "slab" "timeout" "tokio" "tokio-util" "tracing" "util" ]; + }; + "tower-layer" = rec { + crateName = "tower-layer"; + version = "0.3.2"; + edition = "2018"; + sha256 = "1l7i17k9vlssrdg4s3b0ia5jjkmmxsvv8s9y9ih0jfi8ssz8s362"; + authors = [ + "Tower Maintainers <team@tower-rs.com>" + ]; + + }; + "tower-service" = rec { + crateName = "tower-service"; + version = "0.3.2"; + edition = "2018"; + sha256 = "0lmfzmmvid2yp2l36mbavhmqgsvzqf7r2wiwz73ml4xmwaf1rg5n"; + authors = [ + "Tower Maintainers <team@tower-rs.com>" + ]; + + }; + "tracing" = rec { + crateName = "tracing"; + version = "0.1.37"; + edition = "2018"; + sha256 = "1f2fylc79xmbh7v53kak6qyw27njbx227rd64kb4bga8ilxc7s4c"; + authors = [ + "Eliza Weisman <eliza@buoyant.io>" + "Tokio Contributors <team@tokio.rs>" + ]; + dependencies = [ + { + name = "cfg-if"; + packageId = "cfg-if"; + } + { + name = "log"; + packageId = "log"; + optional = true; + } + { + name = "pin-project-lite"; + packageId = "pin-project-lite"; + } + { + name = "tracing-attributes"; + packageId = "tracing-attributes"; + optional = true; + } + { + name = "tracing-core"; + packageId = "tracing-core"; + usesDefaultFeatures = false; + } + ]; + devDependencies = [ + { + name = "log"; + packageId = "log"; + } + ]; + features = { + "attributes" = [ "tracing-attributes" ]; + "default" = [ "std" "attributes" ]; + "log" = [ "dep:log" ]; + "log-always" = [ "log" ]; + "std" = [ "tracing-core/std" ]; + "tracing-attributes" = [ "dep:tracing-attributes" ]; + "valuable" = [ "tracing-core/valuable" ]; + }; + resolvedDefaultFeatures = [ "attributes" "default" "log" "std" "tracing-attributes" ]; + }; + "tracing-attributes" = rec { + crateName = "tracing-attributes"; + version = "0.1.24"; + edition = "2018"; + sha256 = "0x3spb5h4m56035lrvrchbyhg8pxrg4sk0qij8d0ni815b5f6mqg"; + procMacro = true; + authors = [ + "Tokio Contributors <team@tokio.rs>" + "Eliza Weisman <eliza@buoyant.io>" + "David Barsky <dbarsky@amazon.com>" + ]; + dependencies = [ + { + name = "proc-macro2"; + packageId = "proc-macro2 1.0.56"; + } + { + name = "quote"; + packageId = "quote 1.0.26"; + } + { + name = "syn"; + packageId = "syn 2.0.15"; + usesDefaultFeatures = false; + features = [ "full" "parsing" "printing" "visit-mut" "clone-impls" "extra-traits" "proc-macro" ]; + } + ]; + features = { }; + }; + "tracing-core" = rec { + crateName = "tracing-core"; + version = "0.1.30"; + edition = "2018"; + sha256 = "0fi1jz3jbzk3n7k379pwv3wfhn35c5gcwn000m2xh7xb1sx07sr4"; + authors = [ + "Tokio Contributors <team@tokio.rs>" + ]; + dependencies = [ + { + name = "once_cell"; + packageId = "once_cell"; + optional = true; + } + { + name = "valuable"; + packageId = "valuable"; + optional = true; + usesDefaultFeatures = false; + target = { target, features }: (target."tracing_unstable" or false); + } + ]; + features = { + "default" = [ "std" "valuable/std" ]; + "once_cell" = [ "dep:once_cell" ]; + "std" = [ "once_cell" ]; + "valuable" = [ "dep:valuable" ]; + }; + resolvedDefaultFeatures = [ "default" "once_cell" "std" "valuable" ]; + }; + "tracing-futures" = rec { + crateName 
= "tracing-futures"; + version = "0.2.5"; + edition = "2018"; + sha256 = "1wimg0iwa2ldq7xv98lvivvf3q9ykfminig8r1bs0ig22np9bl4p"; + authors = [ + "Eliza Weisman <eliza@buoyant.io>" + "Tokio Contributors <team@tokio.rs>" + ]; + dependencies = [ + { + name = "pin-project"; + packageId = "pin-project"; + optional = true; + } + { + name = "tracing"; + packageId = "tracing"; + usesDefaultFeatures = false; + } + ]; + features = { + "default" = [ "std-future" "std" ]; + "futures" = [ "dep:futures" ]; + "futures-01" = [ "futures_01" "std" ]; + "futures-03" = [ "std-future" "futures" "futures-task" "std" ]; + "futures-task" = [ "dep:futures-task" ]; + "futures_01" = [ "dep:futures_01" ]; + "pin-project" = [ "dep:pin-project" ]; + "std" = [ "tracing/std" ]; + "std-future" = [ "pin-project" ]; + "tokio" = [ "dep:tokio" ]; + "tokio-executor" = [ "dep:tokio-executor" ]; + }; + resolvedDefaultFeatures = [ "default" "pin-project" "std" "std-future" ]; + }; + "tracing-log" = rec { + crateName = "tracing-log"; + version = "0.1.3"; + edition = "2018"; + sha256 = "08prnkxq8yas6jvvjnvyx5v3hwblas5527wxxgbiw2yis8rsvpbq"; + authors = [ + "Tokio Contributors <team@tokio.rs>" + ]; + dependencies = [ + { + name = "lazy_static"; + packageId = "lazy_static"; + } + { + name = "log"; + packageId = "log"; + } + { + name = "tracing-core"; + packageId = "tracing-core"; + } + ]; + features = { + "ahash" = [ "dep:ahash" ]; + "default" = [ "log-tracer" "trace-logger" "std" ]; + "env_logger" = [ "dep:env_logger" ]; + "interest-cache" = [ "lru" "ahash" ]; + "lru" = [ "dep:lru" ]; + "std" = [ "log/std" ]; + }; + resolvedDefaultFeatures = [ "log-tracer" "std" ]; + }; + "tracing-serde" = rec { + crateName = "tracing-serde"; + version = "0.1.3"; + edition = "2018"; + sha256 = "1qfr0va69djvxqvjrx4vqq7p6myy414lx4w1f6amcn0hfwqj2sxw"; + authors = [ + "Tokio Contributors <team@tokio.rs>" + ]; + dependencies = [ + { + name = "serde"; + packageId = "serde"; + } + { + name = "tracing-core"; + packageId = "tracing-core"; + } + ]; + features = { + "valuable" = [ "valuable_crate" "valuable-serde" "tracing-core/valuable" ]; + "valuable-serde" = [ "dep:valuable-serde" ]; + "valuable_crate" = [ "dep:valuable_crate" ]; + }; + }; + "tracing-subscriber" = rec { + crateName = "tracing-subscriber"; + version = "0.3.17"; + edition = "2018"; + sha256 = "0xvwfpmb943hdy4gzyn7a2azgigf30mfd1kx10gyh5gr6yy539ih"; + authors = [ + "Eliza Weisman <eliza@buoyant.io>" + "David Barsky <me@davidbarsky.com>" + "Tokio Contributors <team@tokio.rs>" + ]; + dependencies = [ + { + name = "nu-ansi-term"; + packageId = "nu-ansi-term"; + optional = true; + } + { + name = "serde"; + packageId = "serde"; + optional = true; + } + { + name = "serde_json"; + packageId = "serde_json"; + optional = true; + } + { + name = "sharded-slab"; + packageId = "sharded-slab"; + optional = true; + } + { + name = "smallvec"; + packageId = "smallvec"; + optional = true; + } + { + name = "thread_local"; + packageId = "thread_local"; + optional = true; + } + { + name = "tracing-core"; + packageId = "tracing-core"; + usesDefaultFeatures = false; + } + { + name = "tracing-log"; + packageId = "tracing-log"; + optional = true; + usesDefaultFeatures = false; + features = [ "log-tracer" "std" ]; + } + { + name = "tracing-serde"; + packageId = "tracing-serde"; + optional = true; + } + ]; + devDependencies = [ + { + name = "tracing-log"; + packageId = "tracing-log"; + } + ]; + features = { + "ansi" = [ "fmt" "nu-ansi-term" ]; + "default" = [ "smallvec" "fmt" "ansi" "tracing-log" "std" ]; + "env-filter" 
= [ "matchers" "regex" "once_cell" "tracing" "std" "thread_local" ]; + "fmt" = [ "registry" "std" ]; + "json" = [ "tracing-serde" "serde" "serde_json" ]; + "local-time" = [ "time/local-offset" ]; + "matchers" = [ "dep:matchers" ]; + "nu-ansi-term" = [ "dep:nu-ansi-term" ]; + "once_cell" = [ "dep:once_cell" ]; + "parking_lot" = [ "dep:parking_lot" ]; + "regex" = [ "dep:regex" ]; + "registry" = [ "sharded-slab" "thread_local" "std" ]; + "serde" = [ "dep:serde" ]; + "serde_json" = [ "dep:serde_json" ]; + "sharded-slab" = [ "dep:sharded-slab" ]; + "smallvec" = [ "dep:smallvec" ]; + "std" = [ "alloc" "tracing-core/std" ]; + "thread_local" = [ "dep:thread_local" ]; + "time" = [ "dep:time" ]; + "tracing" = [ "dep:tracing" ]; + "tracing-log" = [ "dep:tracing-log" ]; + "tracing-serde" = [ "dep:tracing-serde" ]; + "valuable" = [ "tracing-core/valuable" "valuable_crate" "valuable-serde" "tracing-serde/valuable" ]; + "valuable-serde" = [ "dep:valuable-serde" ]; + "valuable_crate" = [ "dep:valuable_crate" ]; + }; + resolvedDefaultFeatures = [ "alloc" "ansi" "default" "fmt" "json" "nu-ansi-term" "registry" "serde" "serde_json" "sharded-slab" "smallvec" "std" "thread_local" "tracing-log" "tracing-serde" ]; + }; + "try-lock" = rec { + crateName = "try-lock"; + version = "0.2.4"; + edition = "2015"; + sha256 = "1vc15paa4zi06ixsxihwbvfn24d708nsyg1ncgqwcrn42byyqa1m"; + authors = [ + "Sean McArthur <sean@seanmonstar.com>" + ]; + + }; + "tvix-cli" = rec { + crateName = "tvix-cli"; + version = "0.1.0"; + edition = "2021"; + crateBin = [ + { + name = "tvix"; + path = "src/main.rs"; + requiredFeatures = [ ]; + } + ]; + # We can't filter paths with references in Nix 2.4 + # See https://github.com/NixOS/nix/issues/5410 + src = + if (lib.versionOlder builtins.nixVersion "2.4pre20211007") + then lib.cleanSourceWith { filter = sourceFilter; src = ./cli; } + else ./cli; + dependencies = [ + { + name = "clap"; + packageId = "clap 4.2.7"; + features = [ "derive" "env" ]; + } + { + name = "data-encoding"; + packageId = "data-encoding"; + } + { + name = "dirs"; + packageId = "dirs"; + } + { + name = "nix-compat"; + packageId = "nix-compat"; + } + { + name = "rustyline"; + packageId = "rustyline"; + } + { + name = "smol_str"; + packageId = "smol_str"; + } + { + name = "ssri"; + packageId = "ssri"; + } + { + name = "thiserror"; + packageId = "thiserror"; + } + { + name = "tvix-eval"; + packageId = "tvix-eval"; + } + { + name = "tvix-store"; + packageId = "tvix-store"; + } + { + name = "wu-manber"; + packageId = "wu-manber"; + } + ]; + + }; + "tvix-eval" = rec { + crateName = "tvix-eval"; + version = "0.1.0"; + edition = "2021"; + # We can't filter paths with references in Nix 2.4 + # See https://github.com/NixOS/nix/issues/5410 + src = + if (lib.versionOlder builtins.nixVersion "2.4pre20211007") + then lib.cleanSourceWith { filter = sourceFilter; src = ./eval; } + else ./eval; + libName = "tvix_eval"; + dependencies = [ + { + name = "codemap"; + packageId = "codemap"; + } + { + name = "codemap-diagnostic"; + packageId = "codemap-diagnostic"; + } + { + name = "dirs"; + packageId = "dirs"; + } + { + name = "genawaiter"; + packageId = "genawaiter"; + usesDefaultFeatures = false; + } + { + name = "imbl"; + packageId = "imbl"; + features = [ "serde" ]; + } + { + name = "lazy_static"; + packageId = "lazy_static"; + } + { + name = "lexical-core"; + packageId = "lexical-core"; + features = [ "format" "parse-floats" ]; + } + { + name = "path-clean"; + packageId = "path-clean"; + } + { + name = "proptest"; + packageId = "proptest"; + 
optional = true; + usesDefaultFeatures = false; + features = [ "std" "alloc" "break-dead-code" "tempfile" ]; + } + { + name = "regex"; + packageId = "regex"; + } + { + name = "rnix"; + packageId = "rnix"; + } + { + name = "rowan"; + packageId = "rowan"; + } + { + name = "serde"; + packageId = "serde"; + features = [ "rc" "derive" ]; + } + { + name = "serde_json"; + packageId = "serde_json"; + } + { + name = "smol_str"; + packageId = "smol_str"; + } + { + name = "tabwriter"; + packageId = "tabwriter"; + } + { + name = "test-strategy"; + packageId = "test-strategy"; + optional = true; + } + { + name = "toml"; + packageId = "toml"; + } + { + name = "tvix-eval-builtin-macros"; + packageId = "tvix-eval-builtin-macros"; + rename = "builtin-macros"; + } + { + name = "xml-rs"; + packageId = "xml-rs"; + } + ]; + devDependencies = [ + { + name = "criterion"; + packageId = "criterion"; + } + { + name = "itertools"; + packageId = "itertools"; + } + { + name = "pretty_assertions"; + packageId = "pretty_assertions"; + } + { + name = "tempdir"; + packageId = "tempdir"; + } + { + name = "test-generator"; + packageId = "test-generator"; + } + ]; + features = { + "arbitrary" = [ "proptest" "test-strategy" "imbl/proptest" ]; + "default" = [ "impure" "arbitrary" "nix_tests" ]; + "proptest" = [ "dep:proptest" ]; + "test-strategy" = [ "dep:test-strategy" ]; + }; + resolvedDefaultFeatures = [ "arbitrary" "default" "impure" "nix_tests" "proptest" "test-strategy" ]; + }; + "tvix-eval-builtin-macros" = rec { + crateName = "tvix-eval-builtin-macros"; + version = "0.0.1"; + edition = "2021"; + # We can't filter paths with references in Nix 2.4 + # See https://github.com/NixOS/nix/issues/5410 + src = + if (lib.versionOlder builtins.nixVersion "2.4pre20211007") + then lib.cleanSourceWith { filter = sourceFilter; src = ./eval/builtin-macros; } + else ./eval/builtin-macros; + procMacro = true; + authors = [ + "Griffin Smith <root@gws.fyi>" + ]; + dependencies = [ + { + name = "proc-macro2"; + packageId = "proc-macro2 1.0.56"; + } + { + name = "quote"; + packageId = "quote 1.0.26"; + } + { + name = "syn"; + packageId = "syn 1.0.109"; + features = [ "full" "parsing" "printing" "visit" "visit-mut" "extra-traits" ]; + } + ]; + devDependencies = [ + { + name = "tvix-eval"; + packageId = "tvix-eval"; + } + ]; + + }; + "tvix-serde" = rec { + crateName = "tvix-serde"; + version = "0.1.0"; + edition = "2021"; + # We can't filter paths with references in Nix 2.4 + # See https://github.com/NixOS/nix/issues/5410 + src = + if (lib.versionOlder builtins.nixVersion "2.4pre20211007") + then lib.cleanSourceWith { filter = sourceFilter; src = ./serde; } + else ./serde; + dependencies = [ + { + name = "serde"; + packageId = "serde"; + features = [ "derive" ]; + } + { + name = "tvix-eval"; + packageId = "tvix-eval"; + } + ]; + + }; + "tvix-store" = rec { + crateName = "tvix-store"; + version = "0.1.0"; + edition = "2021"; + crateBin = [ + { + name = "tvix-store"; + path = "src/bin/tvix-store.rs"; + requiredFeatures = [ ]; + } + ]; + # We can't filter paths with references in Nix 2.4 + # See https://github.com/NixOS/nix/issues/5410 + src = + if (lib.versionOlder builtins.nixVersion "2.4pre20211007") + then lib.cleanSourceWith { filter = sourceFilter; src = ./store; } + else ./store; + dependencies = [ + { + name = "anyhow"; + packageId = "anyhow"; + } + { + name = "blake3"; + packageId = "blake3"; + features = [ "rayon" "std" ]; + } + { + name = "bytes"; + packageId = "bytes"; + } + { + name = "clap"; + packageId = "clap 4.2.7"; + features = 
[ "derive" "env" ]; + } + { + name = "count-write"; + packageId = "count-write"; + } + { + name = "data-encoding"; + packageId = "data-encoding"; + } + { + name = "futures"; + packageId = "futures"; + } + { + name = "lazy_static"; + packageId = "lazy_static"; + } + { + name = "nix-compat"; + packageId = "nix-compat"; + } + { + name = "prost"; + packageId = "prost"; + } + { + name = "rayon"; + packageId = "rayon"; + } + { + name = "serde_json"; + packageId = "serde_json"; + } + { + name = "sha2"; + packageId = "sha2 0.10.6"; + } + { + name = "sled"; + packageId = "sled"; + features = [ "compression" ]; + } + { + name = "smol_str"; + packageId = "smol_str"; + } + { + name = "thiserror"; + packageId = "thiserror"; + } + { + name = "tokio"; + packageId = "tokio"; + features = [ "rt-multi-thread" "net" ]; + } + { + name = "tokio-stream"; + packageId = "tokio-stream"; + } + { + name = "tokio-util"; + packageId = "tokio-util"; + features = [ "io" "io-util" ]; + } + { + name = "tonic"; + packageId = "tonic"; + } + { + name = "tonic-reflection"; + packageId = "tonic-reflection"; + optional = true; + } + { + name = "tower"; + packageId = "tower"; + } + { + name = "tracing"; + packageId = "tracing"; + } + { + name = "tracing-subscriber"; + packageId = "tracing-subscriber"; + features = [ "json" ]; + } + { + name = "tvix-eval"; + packageId = "tvix-eval"; + } + { + name = "walkdir"; + packageId = "walkdir"; + } + ]; + buildDependencies = [ + { + name = "prost-build"; + packageId = "prost-build"; + } + { + name = "tonic-build"; + packageId = "tonic-build"; + } + ]; + devDependencies = [ + { + name = "tempfile"; + packageId = "tempfile"; + } + { + name = "test-case"; + packageId = "test-case"; + } + { + name = "tonic-mock"; + packageId = "tonic-mock"; + } + ]; + features = { + "default" = [ "reflection" ]; + "reflection" = [ "tonic-reflection" ]; + "tonic-reflection" = [ "dep:tonic-reflection" ]; + }; + resolvedDefaultFeatures = [ "default" "reflection" "tonic-reflection" ]; + }; + "typenum" = rec { + crateName = "typenum"; + version = "1.16.0"; + edition = "2018"; + sha256 = "1fhb9iaqyjn4dzn2vl86kxjhp4xpw5gynczlnqzf4x6rjgpn2ya9"; + build = "build/main.rs"; + authors = [ + "Paho Lurie-Gregg <paho@paholg.com>" + "Andre Bogus <bogusandre@gmail.com>" + ]; + features = { + "scale-info" = [ "dep:scale-info" ]; + "scale_info" = [ "scale-info/derive" ]; + }; + }; + "unarray" = rec { + crateName = "unarray"; + version = "0.1.4"; + edition = "2018"; + sha256 = "154smf048k84prsdgh09nkm2n0w0336v84jd4zikyn6v6jrqbspa"; + + }; + "unicode-ident" = rec { + crateName = "unicode-ident"; + version = "1.0.8"; + edition = "2018"; + sha256 = "1x4v4v95fv9gn5zbpm23sa9awjvmclap1wh1lmikmw9rna3llip5"; + authors = [ + "David Tolnay <dtolnay@gmail.com>" + ]; + + }; + "unicode-segmentation" = rec { + crateName = "unicode-segmentation"; + version = "1.10.1"; + edition = "2018"; + sha256 = "0dky2hm5k51xy11hc3nk85p533rvghd462b6i0c532b7hl4j9mhx"; + authors = [ + "kwantam <kwantam@gmail.com>" + "Manish Goregaokar <manishsmail@gmail.com>" + ]; + features = { }; + }; + "unicode-width" = rec { + crateName = "unicode-width"; + version = "0.1.10"; + edition = "2015"; + sha256 = "12vc3wv0qwg8rzcgb9bhaf5119dlmd6lmkhbfy1zfls6n7jx3vf0"; + authors = [ + "kwantam <kwantam@gmail.com>" + "Manish Goregaokar <manishsmail@gmail.com>" + ]; + features = { + "compiler_builtins" = [ "dep:compiler_builtins" ]; + "core" = [ "dep:core" ]; + "rustc-dep-of-std" = [ "std" "core" "compiler_builtins" ]; + "std" = [ "dep:std" ]; + }; + resolvedDefaultFeatures = [ 
"default" ]; + }; + "unicode-xid" = rec { + crateName = "unicode-xid"; + version = "0.1.0"; + edition = "2015"; + sha256 = "1z57lqh4s18rr4x0j4fw4fmp9hf9346h0kmdgqsqx0fhjr3k0wpw"; + authors = [ + "erick.tryzelaar <erick.tryzelaar@gmail.com>" + "kwantam <kwantam@gmail.com>" + ]; + features = { }; + resolvedDefaultFeatures = [ "default" ]; + }; + "utf8parse" = rec { + crateName = "utf8parse"; + version = "0.2.1"; + edition = "2018"; + sha256 = "02ip1a0az0qmc2786vxk2nqwsgcwf17d3a38fkf0q7hrmwh9c6vi"; + authors = [ + "Joe Wilm <joe@jwilm.com>" + "Christian Duerr <contact@christianduerr.com>" + ]; + features = { }; + resolvedDefaultFeatures = [ "default" ]; + }; + "valuable" = rec { + crateName = "valuable"; + version = "0.1.0"; + edition = "2018"; + sha256 = "0v9gp3nkjbl30z0fd56d8mx7w1csk86wwjhfjhr400wh9mfpw2w3"; + features = { + "default" = [ "std" ]; + "derive" = [ "valuable-derive" ]; + "std" = [ "alloc" ]; + "valuable-derive" = [ "dep:valuable-derive" ]; + }; + resolvedDefaultFeatures = [ "alloc" "std" ]; + }; + "version_check" = rec { + crateName = "version_check"; + version = "0.9.4"; + edition = "2015"; + sha256 = "0gs8grwdlgh0xq660d7wr80x14vxbizmd8dbp29p2pdncx8lp1s9"; + authors = [ + "Sergio Benitez <sb@sergio.bz>" + ]; + + }; + "wait-timeout" = rec { + crateName = "wait-timeout"; + version = "0.2.0"; + edition = "2015"; + crateBin = [ ]; + sha256 = "1xpkk0j5l9pfmjfh1pi0i89invlavfrd9av5xp0zhxgb29dhy84z"; + authors = [ + "Alex Crichton <alex@alexcrichton.com>" + ]; + dependencies = [ + { + name = "libc"; + packageId = "libc"; + target = { target, features }: (target."unix" or false); + } + ]; + + }; + "walkdir" = rec { + crateName = "walkdir"; + version = "2.3.3"; + edition = "2018"; + sha256 = "16768hy32kcvghq7v4ci8llfjvdiwrwg6sj9nzcdiisnv9699prn"; + authors = [ + "Andrew Gallant <jamslam@gmail.com>" + ]; + dependencies = [ + { + name = "same-file"; + packageId = "same-file"; + } + { + name = "winapi-util"; + packageId = "winapi-util"; + target = { target, features }: (target."windows" or false); + } + ]; + + }; + "want" = rec { + crateName = "want"; + version = "0.3.0"; + edition = "2018"; + sha256 = "181b2zmwfq389x9n2g1n37cvcvvdand832zz6v8i1l8wrdlaks0w"; + authors = [ + "Sean McArthur <sean@seanmonstar.com>" + ]; + dependencies = [ + { + name = "log"; + packageId = "log"; + } + { + name = "try-lock"; + packageId = "try-lock"; + } + ]; + + }; + "wasi" = rec { + crateName = "wasi"; + version = "0.11.0+wasi-snapshot-preview1"; + edition = "2018"; + sha256 = "08z4hxwkpdpalxjps1ai9y7ihin26y9f476i53dv98v45gkqg3cw"; + authors = [ + "The Cranelift Project Developers" + ]; + features = { + "compiler_builtins" = [ "dep:compiler_builtins" ]; + "core" = [ "dep:core" ]; + "default" = [ "std" ]; + "rustc-dep-of-std" = [ "compiler_builtins" "core" "rustc-std-workspace-alloc" ]; + "rustc-std-workspace-alloc" = [ "dep:rustc-std-workspace-alloc" ]; + }; + resolvedDefaultFeatures = [ "default" "std" ]; + }; + "wasm-bindgen" = rec { + crateName = "wasm-bindgen"; + version = "0.2.84"; + edition = "2018"; + sha256 = "0fx5gh0b4n6znfa3blz92wn1k4bbiysyq9m95s7rn3gk46ydry1i"; + authors = [ + "The wasm-bindgen Developers" + ]; + dependencies = [ + { + name = "cfg-if"; + packageId = "cfg-if"; + } + { + name = "wasm-bindgen-macro"; + packageId = "wasm-bindgen-macro"; + } + ]; + features = { + "default" = [ "spans" "std" ]; + "enable-interning" = [ "std" ]; + "serde" = [ "dep:serde" ]; + "serde-serialize" = [ "serde" "serde_json" "std" ]; + "serde_json" = [ "dep:serde_json" ]; + "spans" = [ 
"wasm-bindgen-macro/spans" ]; + "strict-macro" = [ "wasm-bindgen-macro/strict-macro" ]; + "xxx_debug_only_print_generated_code" = [ "wasm-bindgen-macro/xxx_debug_only_print_generated_code" ]; + }; + resolvedDefaultFeatures = [ "default" "spans" "std" ]; + }; + "wasm-bindgen-backend" = rec { + crateName = "wasm-bindgen-backend"; + version = "0.2.84"; + edition = "2018"; + sha256 = "1ffc0wb293ha56i66f830x7f8aa2xql69a21lrasy1ncbgyr1klm"; + authors = [ + "The wasm-bindgen Developers" + ]; + dependencies = [ + { + name = "bumpalo"; + packageId = "bumpalo"; + } + { + name = "log"; + packageId = "log"; + } + { + name = "once_cell"; + packageId = "once_cell"; + } + { + name = "proc-macro2"; + packageId = "proc-macro2 1.0.56"; + } + { + name = "quote"; + packageId = "quote 1.0.26"; + } + { + name = "syn"; + packageId = "syn 1.0.109"; + features = [ "full" ]; + } + { + name = "wasm-bindgen-shared"; + packageId = "wasm-bindgen-shared"; + } + ]; + features = { + "extra-traits" = [ "syn/extra-traits" ]; + }; + resolvedDefaultFeatures = [ "spans" ]; + }; + "wasm-bindgen-macro" = rec { + crateName = "wasm-bindgen-macro"; + version = "0.2.84"; + edition = "2018"; + sha256 = "1idlq28awqhq8rclb22rn5xix82w9a4rgy11vkapzhzd1dygf8ac"; + procMacro = true; + authors = [ + "The wasm-bindgen Developers" + ]; + dependencies = [ + { + name = "quote"; + packageId = "quote 1.0.26"; + } + { + name = "wasm-bindgen-macro-support"; + packageId = "wasm-bindgen-macro-support"; + } + ]; + features = { + "spans" = [ "wasm-bindgen-macro-support/spans" ]; + "strict-macro" = [ "wasm-bindgen-macro-support/strict-macro" ]; + }; + resolvedDefaultFeatures = [ "spans" ]; + }; + "wasm-bindgen-macro-support" = rec { + crateName = "wasm-bindgen-macro-support"; + version = "0.2.84"; + edition = "2018"; + sha256 = "1xm56lpi0rihh8ny7x085dgs3jdm47spgqflb98wghyadwq83zra"; + authors = [ + "The wasm-bindgen Developers" + ]; + dependencies = [ + { + name = "proc-macro2"; + packageId = "proc-macro2 1.0.56"; + } + { + name = "quote"; + packageId = "quote 1.0.26"; + } + { + name = "syn"; + packageId = "syn 1.0.109"; + features = [ "visit" "full" ]; + } + { + name = "wasm-bindgen-backend"; + packageId = "wasm-bindgen-backend"; + } + { + name = "wasm-bindgen-shared"; + packageId = "wasm-bindgen-shared"; + } + ]; + features = { + "extra-traits" = [ "syn/extra-traits" ]; + "spans" = [ "wasm-bindgen-backend/spans" ]; + }; + resolvedDefaultFeatures = [ "spans" ]; + }; + "wasm-bindgen-shared" = rec { + crateName = "wasm-bindgen-shared"; + version = "0.2.84"; + edition = "2018"; + sha256 = "0pcvk1c97r1pprzfaxxn359r0wqg5bm33ylbwgjh8f4cwbvzwih0"; + authors = [ + "The wasm-bindgen Developers" + ]; + + }; + "web-sys" = rec { + crateName = "web-sys"; + version = "0.3.61"; + edition = "2018"; + sha256 = "15qbbdbhyz02srvm01p0cvlh0pvmbbin9hislp0yx8rvnbs9jfz3"; + authors = [ + "The wasm-bindgen Developers" + ]; + dependencies = [ + { + name = "js-sys"; + packageId = "js-sys"; + } + { + name = "wasm-bindgen"; + packageId = "wasm-bindgen"; + } + ]; + features = { + "AbortSignal" = [ "EventTarget" ]; + "AnalyserNode" = [ "AudioNode" "EventTarget" ]; + "Animation" = [ "EventTarget" ]; + "AnimationEvent" = [ "Event" ]; + "AnimationPlaybackEvent" = [ "Event" ]; + "Attr" = [ "EventTarget" "Node" ]; + "AudioBufferSourceNode" = [ "AudioNode" "AudioScheduledSourceNode" "EventTarget" ]; + "AudioContext" = [ "BaseAudioContext" "EventTarget" ]; + "AudioDestinationNode" = [ "AudioNode" "EventTarget" ]; + "AudioNode" = [ "EventTarget" ]; + "AudioProcessingEvent" = [ "Event" ]; + 
"AudioScheduledSourceNode" = [ "AudioNode" "EventTarget" ]; + "AudioStreamTrack" = [ "EventTarget" "MediaStreamTrack" ]; + "AudioTrackList" = [ "EventTarget" ]; + "AudioWorklet" = [ "Worklet" ]; + "AudioWorkletGlobalScope" = [ "WorkletGlobalScope" ]; + "AudioWorkletNode" = [ "AudioNode" "EventTarget" ]; + "AuthenticatorAssertionResponse" = [ "AuthenticatorResponse" ]; + "AuthenticatorAttestationResponse" = [ "AuthenticatorResponse" ]; + "BaseAudioContext" = [ "EventTarget" ]; + "BatteryManager" = [ "EventTarget" ]; + "BeforeUnloadEvent" = [ "Event" ]; + "BiquadFilterNode" = [ "AudioNode" "EventTarget" ]; + "BlobEvent" = [ "Event" ]; + "Bluetooth" = [ "EventTarget" ]; + "BluetoothAdvertisingEvent" = [ "Event" ]; + "BluetoothDevice" = [ "EventTarget" ]; + "BluetoothPermissionResult" = [ "EventTarget" "PermissionStatus" ]; + "BluetoothRemoteGattCharacteristic" = [ "EventTarget" ]; + "BluetoothRemoteGattService" = [ "EventTarget" ]; + "BroadcastChannel" = [ "EventTarget" ]; + "CanvasCaptureMediaStream" = [ "EventTarget" "MediaStream" ]; + "CdataSection" = [ "CharacterData" "EventTarget" "Node" "Text" ]; + "ChannelMergerNode" = [ "AudioNode" "EventTarget" ]; + "ChannelSplitterNode" = [ "AudioNode" "EventTarget" ]; + "CharacterData" = [ "EventTarget" "Node" ]; + "ChromeWorker" = [ "EventTarget" "Worker" ]; + "Clipboard" = [ "EventTarget" ]; + "ClipboardEvent" = [ "Event" ]; + "CloseEvent" = [ "Event" ]; + "Comment" = [ "CharacterData" "EventTarget" "Node" ]; + "CompositionEvent" = [ "Event" "UiEvent" ]; + "ConstantSourceNode" = [ "AudioNode" "AudioScheduledSourceNode" "EventTarget" ]; + "ConvolverNode" = [ "AudioNode" "EventTarget" ]; + "CssAnimation" = [ "Animation" "EventTarget" ]; + "CssConditionRule" = [ "CssGroupingRule" "CssRule" ]; + "CssCounterStyleRule" = [ "CssRule" ]; + "CssFontFaceRule" = [ "CssRule" ]; + "CssFontFeatureValuesRule" = [ "CssRule" ]; + "CssGroupingRule" = [ "CssRule" ]; + "CssImportRule" = [ "CssRule" ]; + "CssKeyframeRule" = [ "CssRule" ]; + "CssKeyframesRule" = [ "CssRule" ]; + "CssMediaRule" = [ "CssConditionRule" "CssGroupingRule" "CssRule" ]; + "CssNamespaceRule" = [ "CssRule" ]; + "CssPageRule" = [ "CssRule" ]; + "CssStyleRule" = [ "CssRule" ]; + "CssStyleSheet" = [ "StyleSheet" ]; + "CssSupportsRule" = [ "CssConditionRule" "CssGroupingRule" "CssRule" ]; + "CssTransition" = [ "Animation" "EventTarget" ]; + "CustomEvent" = [ "Event" ]; + "DedicatedWorkerGlobalScope" = [ "EventTarget" "WorkerGlobalScope" ]; + "DelayNode" = [ "AudioNode" "EventTarget" ]; + "DeviceLightEvent" = [ "Event" ]; + "DeviceMotionEvent" = [ "Event" ]; + "DeviceOrientationEvent" = [ "Event" ]; + "DeviceProximityEvent" = [ "Event" ]; + "Document" = [ "EventTarget" "Node" ]; + "DocumentFragment" = [ "EventTarget" "Node" ]; + "DocumentTimeline" = [ "AnimationTimeline" ]; + "DocumentType" = [ "EventTarget" "Node" ]; + "DomMatrix" = [ "DomMatrixReadOnly" ]; + "DomPoint" = [ "DomPointReadOnly" ]; + "DomRect" = [ "DomRectReadOnly" ]; + "DomRequest" = [ "EventTarget" ]; + "DragEvent" = [ "Event" "MouseEvent" "UiEvent" ]; + "DynamicsCompressorNode" = [ "AudioNode" "EventTarget" ]; + "Element" = [ "EventTarget" "Node" ]; + "ErrorEvent" = [ "Event" ]; + "EventSource" = [ "EventTarget" ]; + "ExtendableEvent" = [ "Event" ]; + "ExtendableMessageEvent" = [ "Event" "ExtendableEvent" ]; + "FetchEvent" = [ "Event" "ExtendableEvent" ]; + "FetchObserver" = [ "EventTarget" ]; + "File" = [ "Blob" ]; + "FileReader" = [ "EventTarget" ]; + "FileSystemDirectoryEntry" = [ "FileSystemEntry" ]; + "FileSystemFileEntry" = 
[ "FileSystemEntry" ]; + "FocusEvent" = [ "Event" "UiEvent" ]; + "FontFaceSet" = [ "EventTarget" ]; + "FontFaceSetLoadEvent" = [ "Event" ]; + "GainNode" = [ "AudioNode" "EventTarget" ]; + "GamepadAxisMoveEvent" = [ "Event" "GamepadEvent" ]; + "GamepadButtonEvent" = [ "Event" "GamepadEvent" ]; + "GamepadEvent" = [ "Event" ]; + "GpuDevice" = [ "EventTarget" ]; + "GpuInternalError" = [ "GpuError" ]; + "GpuOutOfMemoryError" = [ "GpuError" ]; + "GpuUncapturedErrorEvent" = [ "Event" ]; + "GpuValidationError" = [ "GpuError" ]; + "HashChangeEvent" = [ "Event" ]; + "Hid" = [ "EventTarget" ]; + "HidConnectionEvent" = [ "Event" ]; + "HidDevice" = [ "EventTarget" ]; + "HidInputReportEvent" = [ "Event" ]; + "HtmlAnchorElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlAreaElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlAudioElement" = [ "Element" "EventTarget" "HtmlElement" "HtmlMediaElement" "Node" ]; + "HtmlBaseElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlBodyElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlBrElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlButtonElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlCanvasElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlDListElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlDataElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlDataListElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlDetailsElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlDialogElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlDirectoryElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlDivElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlDocument" = [ "Document" "EventTarget" "Node" ]; + "HtmlElement" = [ "Element" "EventTarget" "Node" ]; + "HtmlEmbedElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlFieldSetElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlFontElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlFormControlsCollection" = [ "HtmlCollection" ]; + "HtmlFormElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlFrameElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlFrameSetElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlHeadElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlHeadingElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlHrElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlHtmlElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlIFrameElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlImageElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlInputElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlLabelElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlLegendElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlLiElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlLinkElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlMapElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlMediaElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlMenuElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlMenuItemElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlMetaElement" = [ "Element" 
"EventTarget" "HtmlElement" "Node" ]; + "HtmlMeterElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlModElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlOListElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlObjectElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlOptGroupElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlOptionElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlOptionsCollection" = [ "HtmlCollection" ]; + "HtmlOutputElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlParagraphElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlParamElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlPictureElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlPreElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlProgressElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlQuoteElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlScriptElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlSelectElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlSlotElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlSourceElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlSpanElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlStyleElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlTableCaptionElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlTableCellElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlTableColElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlTableElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlTableRowElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlTableSectionElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlTemplateElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlTextAreaElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlTimeElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlTitleElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlTrackElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlUListElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlUnknownElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlVideoElement" = [ "Element" "EventTarget" "HtmlElement" "HtmlMediaElement" "Node" ]; + "IdbCursorWithValue" = [ "IdbCursor" ]; + "IdbDatabase" = [ "EventTarget" ]; + "IdbFileHandle" = [ "EventTarget" ]; + "IdbFileRequest" = [ "DomRequest" "EventTarget" ]; + "IdbLocaleAwareKeyRange" = [ "IdbKeyRange" ]; + "IdbMutableFile" = [ "EventTarget" ]; + "IdbOpenDbRequest" = [ "EventTarget" "IdbRequest" ]; + "IdbRequest" = [ "EventTarget" ]; + "IdbTransaction" = [ "EventTarget" ]; + "IdbVersionChangeEvent" = [ "Event" ]; + "IirFilterNode" = [ "AudioNode" "EventTarget" ]; + "ImageCaptureErrorEvent" = [ "Event" ]; + "ImageTrack" = [ "EventTarget" ]; + "InputEvent" = [ "Event" "UiEvent" ]; + "KeyboardEvent" = [ "Event" "UiEvent" ]; + "KeyframeEffect" = [ "AnimationEffect" ]; + "LocalMediaStream" = [ "EventTarget" "MediaStream" ]; + "MediaDevices" = [ "EventTarget" ]; + "MediaElementAudioSourceNode" = [ "AudioNode" "EventTarget" ]; + "MediaEncryptedEvent" = [ "Event" ]; + "MediaKeyError" = [ "Event" ]; + "MediaKeyMessageEvent" = [ "Event" ]; + "MediaKeySession" = [ "EventTarget" 
]; + "MediaQueryList" = [ "EventTarget" ]; + "MediaQueryListEvent" = [ "Event" ]; + "MediaRecorder" = [ "EventTarget" ]; + "MediaRecorderErrorEvent" = [ "Event" ]; + "MediaSource" = [ "EventTarget" ]; + "MediaStream" = [ "EventTarget" ]; + "MediaStreamAudioDestinationNode" = [ "AudioNode" "EventTarget" ]; + "MediaStreamAudioSourceNode" = [ "AudioNode" "EventTarget" ]; + "MediaStreamEvent" = [ "Event" ]; + "MediaStreamTrack" = [ "EventTarget" ]; + "MediaStreamTrackEvent" = [ "Event" ]; + "MediaStreamTrackGenerator" = [ "EventTarget" "MediaStreamTrack" ]; + "MessageEvent" = [ "Event" ]; + "MessagePort" = [ "EventTarget" ]; + "MidiAccess" = [ "EventTarget" ]; + "MidiConnectionEvent" = [ "Event" ]; + "MidiInput" = [ "EventTarget" "MidiPort" ]; + "MidiMessageEvent" = [ "Event" ]; + "MidiOutput" = [ "EventTarget" "MidiPort" ]; + "MidiPort" = [ "EventTarget" ]; + "MouseEvent" = [ "Event" "UiEvent" ]; + "MouseScrollEvent" = [ "Event" "MouseEvent" "UiEvent" ]; + "MutationEvent" = [ "Event" ]; + "NetworkInformation" = [ "EventTarget" ]; + "Node" = [ "EventTarget" ]; + "Notification" = [ "EventTarget" ]; + "NotificationEvent" = [ "Event" "ExtendableEvent" ]; + "OfflineAudioCompletionEvent" = [ "Event" ]; + "OfflineAudioContext" = [ "BaseAudioContext" "EventTarget" ]; + "OfflineResourceList" = [ "EventTarget" ]; + "OffscreenCanvas" = [ "EventTarget" ]; + "OscillatorNode" = [ "AudioNode" "AudioScheduledSourceNode" "EventTarget" ]; + "PageTransitionEvent" = [ "Event" ]; + "PaintWorkletGlobalScope" = [ "WorkletGlobalScope" ]; + "PannerNode" = [ "AudioNode" "EventTarget" ]; + "PaymentMethodChangeEvent" = [ "Event" "PaymentRequestUpdateEvent" ]; + "PaymentRequestUpdateEvent" = [ "Event" ]; + "Performance" = [ "EventTarget" ]; + "PerformanceMark" = [ "PerformanceEntry" ]; + "PerformanceMeasure" = [ "PerformanceEntry" ]; + "PerformanceNavigationTiming" = [ "PerformanceEntry" "PerformanceResourceTiming" ]; + "PerformanceResourceTiming" = [ "PerformanceEntry" ]; + "PermissionStatus" = [ "EventTarget" ]; + "PointerEvent" = [ "Event" "MouseEvent" "UiEvent" ]; + "PopStateEvent" = [ "Event" ]; + "PopupBlockedEvent" = [ "Event" ]; + "PresentationAvailability" = [ "EventTarget" ]; + "PresentationConnection" = [ "EventTarget" ]; + "PresentationConnectionAvailableEvent" = [ "Event" ]; + "PresentationConnectionCloseEvent" = [ "Event" ]; + "PresentationConnectionList" = [ "EventTarget" ]; + "PresentationRequest" = [ "EventTarget" ]; + "ProcessingInstruction" = [ "CharacterData" "EventTarget" "Node" ]; + "ProgressEvent" = [ "Event" ]; + "PromiseRejectionEvent" = [ "Event" ]; + "PublicKeyCredential" = [ "Credential" ]; + "PushEvent" = [ "Event" "ExtendableEvent" ]; + "RadioNodeList" = [ "NodeList" ]; + "RtcDataChannel" = [ "EventTarget" ]; + "RtcDataChannelEvent" = [ "Event" ]; + "RtcPeerConnection" = [ "EventTarget" ]; + "RtcPeerConnectionIceEvent" = [ "Event" ]; + "RtcTrackEvent" = [ "Event" ]; + "RtcdtmfSender" = [ "EventTarget" ]; + "RtcdtmfToneChangeEvent" = [ "Event" ]; + "Screen" = [ "EventTarget" ]; + "ScreenOrientation" = [ "EventTarget" ]; + "ScriptProcessorNode" = [ "AudioNode" "EventTarget" ]; + "ScrollAreaEvent" = [ "Event" "UiEvent" ]; + "SecurityPolicyViolationEvent" = [ "Event" ]; + "Serial" = [ "EventTarget" ]; + "SerialPort" = [ "EventTarget" ]; + "ServiceWorker" = [ "EventTarget" ]; + "ServiceWorkerContainer" = [ "EventTarget" ]; + "ServiceWorkerGlobalScope" = [ "EventTarget" "WorkerGlobalScope" ]; + "ServiceWorkerRegistration" = [ "EventTarget" ]; + "ShadowRoot" = [ "DocumentFragment" "EventTarget" 
"Node" ]; + "SharedWorker" = [ "EventTarget" ]; + "SharedWorkerGlobalScope" = [ "EventTarget" "WorkerGlobalScope" ]; + "SourceBuffer" = [ "EventTarget" ]; + "SourceBufferList" = [ "EventTarget" ]; + "SpeechRecognition" = [ "EventTarget" ]; + "SpeechRecognitionError" = [ "Event" ]; + "SpeechRecognitionEvent" = [ "Event" ]; + "SpeechSynthesis" = [ "EventTarget" ]; + "SpeechSynthesisErrorEvent" = [ "Event" "SpeechSynthesisEvent" ]; + "SpeechSynthesisEvent" = [ "Event" ]; + "SpeechSynthesisUtterance" = [ "EventTarget" ]; + "StereoPannerNode" = [ "AudioNode" "EventTarget" ]; + "StorageEvent" = [ "Event" ]; + "SubmitEvent" = [ "Event" ]; + "SvgAnimateElement" = [ "Element" "EventTarget" "Node" "SvgAnimationElement" "SvgElement" ]; + "SvgAnimateMotionElement" = [ "Element" "EventTarget" "Node" "SvgAnimationElement" "SvgElement" ]; + "SvgAnimateTransformElement" = [ "Element" "EventTarget" "Node" "SvgAnimationElement" "SvgElement" ]; + "SvgAnimationElement" = [ "Element" "EventTarget" "Node" "SvgElement" ]; + "SvgCircleElement" = [ "Element" "EventTarget" "Node" "SvgElement" "SvgGeometryElement" "SvgGraphicsElement" ]; + "SvgClipPathElement" = [ "Element" "EventTarget" "Node" "SvgElement" ]; + "SvgComponentTransferFunctionElement" = [ "Element" "EventTarget" "Node" "SvgElement" ]; + "SvgDefsElement" = [ "Element" "EventTarget" "Node" "SvgElement" "SvgGraphicsElement" ]; + "SvgDescElement" = [ "Element" "EventTarget" "Node" "SvgElement" ]; + "SvgElement" = [ "Element" "EventTarget" "Node" ]; + "SvgEllipseElement" = [ "Element" "EventTarget" "Node" "SvgElement" "SvgGeometryElement" "SvgGraphicsElement" ]; + "SvgFilterElement" = [ "Element" "EventTarget" "Node" "SvgElement" ]; + "SvgForeignObjectElement" = [ "Element" "EventTarget" "Node" "SvgElement" "SvgGraphicsElement" ]; + "SvgGeometryElement" = [ "Element" "EventTarget" "Node" "SvgElement" "SvgGraphicsElement" ]; + "SvgGradientElement" = [ "Element" "EventTarget" "Node" "SvgElement" ]; + "SvgGraphicsElement" = [ "Element" "EventTarget" "Node" "SvgElement" ]; + "SvgImageElement" = [ "Element" "EventTarget" "Node" "SvgElement" "SvgGraphicsElement" ]; + "SvgLineElement" = [ "Element" "EventTarget" "Node" "SvgElement" "SvgGeometryElement" "SvgGraphicsElement" ]; + "SvgLinearGradientElement" = [ "Element" "EventTarget" "Node" "SvgElement" "SvgGradientElement" ]; + "SvgMarkerElement" = [ "Element" "EventTarget" "Node" "SvgElement" ]; + "SvgMaskElement" = [ "Element" "EventTarget" "Node" "SvgElement" ]; + "SvgMetadataElement" = [ "Element" "EventTarget" "Node" "SvgElement" ]; + "SvgPathElement" = [ "Element" "EventTarget" "Node" "SvgElement" "SvgGeometryElement" "SvgGraphicsElement" ]; + "SvgPathSegArcAbs" = [ "SvgPathSeg" ]; + "SvgPathSegArcRel" = [ "SvgPathSeg" ]; + "SvgPathSegClosePath" = [ "SvgPathSeg" ]; + "SvgPathSegCurvetoCubicAbs" = [ "SvgPathSeg" ]; + "SvgPathSegCurvetoCubicRel" = [ "SvgPathSeg" ]; + "SvgPathSegCurvetoCubicSmoothAbs" = [ "SvgPathSeg" ]; + "SvgPathSegCurvetoCubicSmoothRel" = [ "SvgPathSeg" ]; + "SvgPathSegCurvetoQuadraticAbs" = [ "SvgPathSeg" ]; + "SvgPathSegCurvetoQuadraticRel" = [ "SvgPathSeg" ]; + "SvgPathSegCurvetoQuadraticSmoothAbs" = [ "SvgPathSeg" ]; + "SvgPathSegCurvetoQuadraticSmoothRel" = [ "SvgPathSeg" ]; + "SvgPathSegLinetoAbs" = [ "SvgPathSeg" ]; + "SvgPathSegLinetoHorizontalAbs" = [ "SvgPathSeg" ]; + "SvgPathSegLinetoHorizontalRel" = [ "SvgPathSeg" ]; + "SvgPathSegLinetoRel" = [ "SvgPathSeg" ]; + "SvgPathSegLinetoVerticalAbs" = [ "SvgPathSeg" ]; + "SvgPathSegLinetoVerticalRel" = [ "SvgPathSeg" ]; + 
"SvgPathSegMovetoAbs" = [ "SvgPathSeg" ]; + "SvgPathSegMovetoRel" = [ "SvgPathSeg" ]; + "SvgPatternElement" = [ "Element" "EventTarget" "Node" "SvgElement" ]; + "SvgPolygonElement" = [ "Element" "EventTarget" "Node" "SvgElement" "SvgGeometryElement" "SvgGraphicsElement" ]; + "SvgPolylineElement" = [ "Element" "EventTarget" "Node" "SvgElement" "SvgGeometryElement" "SvgGraphicsElement" ]; + "SvgRadialGradientElement" = [ "Element" "EventTarget" "Node" "SvgElement" "SvgGradientElement" ]; + "SvgRectElement" = [ "Element" "EventTarget" "Node" "SvgElement" "SvgGeometryElement" "SvgGraphicsElement" ]; + "SvgScriptElement" = [ "Element" "EventTarget" "Node" "SvgElement" ]; + "SvgSetElement" = [ "Element" "EventTarget" "Node" "SvgAnimationElement" "SvgElement" ]; + "SvgStopElement" = [ "Element" "EventTarget" "Node" "SvgElement" ]; + "SvgStyleElement" = [ "Element" "EventTarget" "Node" "SvgElement" ]; + "SvgSwitchElement" = [ "Element" "EventTarget" "Node" "SvgElement" "SvgGraphicsElement" ]; + "SvgSymbolElement" = [ "Element" "EventTarget" "Node" "SvgElement" ]; + "SvgTextContentElement" = [ "Element" "EventTarget" "Node" "SvgElement" "SvgGraphicsElement" ]; + "SvgTextElement" = [ "Element" "EventTarget" "Node" "SvgElement" "SvgGraphicsElement" "SvgTextContentElement" "SvgTextPositioningElement" ]; + "SvgTextPathElement" = [ "Element" "EventTarget" "Node" "SvgElement" "SvgGraphicsElement" "SvgTextContentElement" ]; + "SvgTextPositioningElement" = [ "Element" "EventTarget" "Node" "SvgElement" "SvgGraphicsElement" "SvgTextContentElement" ]; + "SvgTitleElement" = [ "Element" "EventTarget" "Node" "SvgElement" ]; + "SvgUseElement" = [ "Element" "EventTarget" "Node" "SvgElement" "SvgGraphicsElement" ]; + "SvgViewElement" = [ "Element" "EventTarget" "Node" "SvgElement" ]; + "SvgaElement" = [ "Element" "EventTarget" "Node" "SvgElement" "SvgGraphicsElement" ]; + "SvgfeBlendElement" = [ "Element" "EventTarget" "Node" "SvgElement" ]; + "SvgfeColorMatrixElement" = [ "Element" "EventTarget" "Node" "SvgElement" ]; + "SvgfeComponentTransferElement" = [ "Element" "EventTarget" "Node" "SvgElement" ]; + "SvgfeCompositeElement" = [ "Element" "EventTarget" "Node" "SvgElement" ]; + "SvgfeConvolveMatrixElement" = [ "Element" "EventTarget" "Node" "SvgElement" ]; + "SvgfeDiffuseLightingElement" = [ "Element" "EventTarget" "Node" "SvgElement" ]; + "SvgfeDisplacementMapElement" = [ "Element" "EventTarget" "Node" "SvgElement" ]; + "SvgfeDistantLightElement" = [ "Element" "EventTarget" "Node" "SvgElement" ]; + "SvgfeDropShadowElement" = [ "Element" "EventTarget" "Node" "SvgElement" ]; + "SvgfeFloodElement" = [ "Element" "EventTarget" "Node" "SvgElement" ]; + "SvgfeFuncAElement" = [ "Element" "EventTarget" "Node" "SvgComponentTransferFunctionElement" "SvgElement" ]; + "SvgfeFuncBElement" = [ "Element" "EventTarget" "Node" "SvgComponentTransferFunctionElement" "SvgElement" ]; + "SvgfeFuncGElement" = [ "Element" "EventTarget" "Node" "SvgComponentTransferFunctionElement" "SvgElement" ]; + "SvgfeFuncRElement" = [ "Element" "EventTarget" "Node" "SvgComponentTransferFunctionElement" "SvgElement" ]; + "SvgfeGaussianBlurElement" = [ "Element" "EventTarget" "Node" "SvgElement" ]; + "SvgfeImageElement" = [ "Element" "EventTarget" "Node" "SvgElement" ]; + "SvgfeMergeElement" = [ "Element" "EventTarget" "Node" "SvgElement" ]; + "SvgfeMergeNodeElement" = [ "Element" "EventTarget" "Node" "SvgElement" ]; + "SvgfeMorphologyElement" = [ "Element" "EventTarget" "Node" "SvgElement" ]; + "SvgfeOffsetElement" = [ "Element" "EventTarget" "Node" 
"SvgElement" ]; + "SvgfePointLightElement" = [ "Element" "EventTarget" "Node" "SvgElement" ]; + "SvgfeSpecularLightingElement" = [ "Element" "EventTarget" "Node" "SvgElement" ]; + "SvgfeSpotLightElement" = [ "Element" "EventTarget" "Node" "SvgElement" ]; + "SvgfeTileElement" = [ "Element" "EventTarget" "Node" "SvgElement" ]; + "SvgfeTurbulenceElement" = [ "Element" "EventTarget" "Node" "SvgElement" ]; + "SvggElement" = [ "Element" "EventTarget" "Node" "SvgElement" "SvgGraphicsElement" ]; + "SvgmPathElement" = [ "Element" "EventTarget" "Node" "SvgElement" ]; + "SvgsvgElement" = [ "Element" "EventTarget" "Node" "SvgElement" "SvgGraphicsElement" ]; + "SvgtSpanElement" = [ "Element" "EventTarget" "Node" "SvgElement" "SvgGraphicsElement" "SvgTextContentElement" "SvgTextPositioningElement" ]; + "TcpServerSocket" = [ "EventTarget" ]; + "TcpServerSocketEvent" = [ "Event" ]; + "TcpSocket" = [ "EventTarget" ]; + "TcpSocketErrorEvent" = [ "Event" ]; + "TcpSocketEvent" = [ "Event" ]; + "Text" = [ "CharacterData" "EventTarget" "Node" ]; + "TextTrack" = [ "EventTarget" ]; + "TextTrackCue" = [ "EventTarget" ]; + "TextTrackList" = [ "EventTarget" ]; + "TimeEvent" = [ "Event" ]; + "TouchEvent" = [ "Event" "UiEvent" ]; + "TrackEvent" = [ "Event" ]; + "TransitionEvent" = [ "Event" ]; + "UiEvent" = [ "Event" ]; + "Usb" = [ "EventTarget" ]; + "UsbConnectionEvent" = [ "Event" ]; + "UsbPermissionResult" = [ "EventTarget" "PermissionStatus" ]; + "UserProximityEvent" = [ "Event" ]; + "ValueEvent" = [ "Event" ]; + "VideoStreamTrack" = [ "EventTarget" "MediaStreamTrack" ]; + "VideoTrackList" = [ "EventTarget" ]; + "VrDisplay" = [ "EventTarget" ]; + "VttCue" = [ "EventTarget" "TextTrackCue" ]; + "WakeLockSentinel" = [ "EventTarget" ]; + "WaveShaperNode" = [ "AudioNode" "EventTarget" ]; + "WebGlContextEvent" = [ "Event" ]; + "WebKitCssMatrix" = [ "DomMatrix" "DomMatrixReadOnly" ]; + "WebSocket" = [ "EventTarget" ]; + "WheelEvent" = [ "Event" "MouseEvent" "UiEvent" ]; + "Window" = [ "EventTarget" ]; + "WindowClient" = [ "Client" ]; + "Worker" = [ "EventTarget" ]; + "WorkerDebuggerGlobalScope" = [ "EventTarget" ]; + "WorkerGlobalScope" = [ "EventTarget" ]; + "XmlDocument" = [ "Document" "EventTarget" "Node" ]; + "XmlHttpRequest" = [ "EventTarget" "XmlHttpRequestEventTarget" ]; + "XmlHttpRequestEventTarget" = [ "EventTarget" ]; + "XmlHttpRequestUpload" = [ "EventTarget" "XmlHttpRequestEventTarget" ]; + "XrBoundedReferenceSpace" = [ "EventTarget" "XrReferenceSpace" "XrSpace" ]; + "XrHand" = [ "XrHandJoint" ]; + "XrInputSourceEvent" = [ "Event" ]; + "XrInputSourcesChangeEvent" = [ "Event" ]; + "XrLayer" = [ "EventTarget" ]; + "XrPermissionStatus" = [ "EventTarget" "PermissionStatus" ]; + "XrReferenceSpace" = [ "EventTarget" "XrSpace" ]; + "XrReferenceSpaceEvent" = [ "Event" ]; + "XrSession" = [ "EventTarget" ]; + "XrSessionEvent" = [ "Event" ]; + "XrSpace" = [ "EventTarget" ]; + "XrSystem" = [ "EventTarget" ]; + "XrViewerPose" = [ "XrPose" ]; + "XrWebGlLayer" = [ "EventTarget" "XrLayer" ]; + }; + resolvedDefaultFeatures = [ "CanvasRenderingContext2d" "Document" "DomRect" "DomRectReadOnly" "Element" "EventTarget" "HtmlCanvasElement" "HtmlElement" "Node" "Window" ]; + }; + "which" = rec { + crateName = "which"; + version = "4.4.0"; + edition = "2018"; + sha256 = "0sd24r17q4j3hc2yjjrg9q4qya1y4n9zq0bj9c2rla1bqn2cfh94"; + authors = [ + "Harry Fei <tiziyuanfang@gmail.com>" + ]; + dependencies = [ + { + name = "either"; + packageId = "either"; + } + { + name = "libc"; + packageId = "libc"; + } + { + name = "once_cell"; + 
packageId = "once_cell"; + target = { target, features }: (target."windows" or false); + } + ]; + features = { + "regex" = [ "dep:regex" ]; + }; + }; + "winapi" = rec { + crateName = "winapi"; + version = "0.3.9"; + edition = "2015"; + sha256 = "06gl025x418lchw1wxj64ycr7gha83m44cjr5sarhynd9xkrm0sw"; + authors = [ + "Peter Atashian <retep998@gmail.com>" + ]; + dependencies = [ + { + name = "winapi-i686-pc-windows-gnu"; + packageId = "winapi-i686-pc-windows-gnu"; + target = { target, features }: (pkgs.rust.lib.toRustTarget stdenv.hostPlatform == "i686-pc-windows-gnu"); + } + { + name = "winapi-x86_64-pc-windows-gnu"; + packageId = "winapi-x86_64-pc-windows-gnu"; + target = { target, features }: (pkgs.rust.lib.toRustTarget stdenv.hostPlatform == "x86_64-pc-windows-gnu"); + } + ]; + features = { + "debug" = [ "impl-debug" ]; + }; + resolvedDefaultFeatures = [ "basetsd" "consoleapi" "errhandlingapi" "fileapi" "handleapi" "knownfolders" "minwinbase" "minwindef" "ntsecapi" "ntstatus" "objbase" "processenv" "processthreadsapi" "profileapi" "shellapi" "shlobj" "std" "stringapiset" "synchapi" "winbase" "wincon" "winerror" "winnt" "winuser" "ws2ipdef" "ws2tcpip" ]; + }; + "winapi-i686-pc-windows-gnu" = rec { + crateName = "winapi-i686-pc-windows-gnu"; + version = "0.4.0"; + edition = "2015"; + sha256 = "1dmpa6mvcvzz16zg6d5vrfy4bxgg541wxrcip7cnshi06v38ffxc"; + authors = [ + "Peter Atashian <retep998@gmail.com>" + ]; + + }; + "winapi-util" = rec { + crateName = "winapi-util"; + version = "0.1.5"; + edition = "2018"; + sha256 = "0y71bp7f6d536czj40dhqk0d55wfbbwqfp2ymqf1an5ibgl6rv3h"; + authors = [ + "Andrew Gallant <jamslam@gmail.com>" + ]; + dependencies = [ + { + name = "winapi"; + packageId = "winapi"; + target = { target, features }: (target."windows" or false); + features = [ "std" "consoleapi" "errhandlingapi" "fileapi" "minwindef" "processenv" "winbase" "wincon" "winerror" "winnt" ]; + } + ]; + + }; + "winapi-x86_64-pc-windows-gnu" = rec { + crateName = "winapi-x86_64-pc-windows-gnu"; + version = "0.4.0"; + edition = "2015"; + sha256 = "0gqq64czqb64kskjryj8isp62m2sgvx25yyj3kpc2myh85w24bki"; + authors = [ + "Peter Atashian <retep998@gmail.com>" + ]; + + }; + "windows-sys 0.45.0" = rec { + crateName = "windows-sys"; + version = "0.45.0"; + edition = "2018"; + sha256 = "1l36bcqm4g89pknfp8r9rl1w4bn017q6a8qlx8viv0xjxzjkna3m"; + authors = [ + "Microsoft" + ]; + dependencies = [ + { + name = "windows-targets"; + packageId = "windows-targets 0.42.2"; + target = { target, features }: (!(target."windows_raw_dylib" or false)); + } + ]; + features = { + "Win32_Data" = [ "Win32" ]; + "Win32_Data_HtmlHelp" = [ "Win32_Data" ]; + "Win32_Data_RightsManagement" = [ "Win32_Data" ]; + "Win32_Data_Xml" = [ "Win32_Data" ]; + "Win32_Data_Xml_MsXml" = [ "Win32_Data_Xml" ]; + "Win32_Data_Xml_XmlLite" = [ "Win32_Data_Xml" ]; + "Win32_Devices" = [ "Win32" ]; + "Win32_Devices_AllJoyn" = [ "Win32_Devices" ]; + "Win32_Devices_BiometricFramework" = [ "Win32_Devices" ]; + "Win32_Devices_Bluetooth" = [ "Win32_Devices" ]; + "Win32_Devices_Communication" = [ "Win32_Devices" ]; + "Win32_Devices_DeviceAccess" = [ "Win32_Devices" ]; + "Win32_Devices_DeviceAndDriverInstallation" = [ "Win32_Devices" ]; + "Win32_Devices_DeviceQuery" = [ "Win32_Devices" ]; + "Win32_Devices_Display" = [ "Win32_Devices" ]; + "Win32_Devices_Enumeration" = [ "Win32_Devices" ]; + "Win32_Devices_Enumeration_Pnp" = [ "Win32_Devices_Enumeration" ]; + "Win32_Devices_Fax" = [ "Win32_Devices" ]; + "Win32_Devices_FunctionDiscovery" = [ "Win32_Devices" ]; + 
"Win32_Devices_Geolocation" = [ "Win32_Devices" ]; + "Win32_Devices_HumanInterfaceDevice" = [ "Win32_Devices" ]; + "Win32_Devices_ImageAcquisition" = [ "Win32_Devices" ]; + "Win32_Devices_PortableDevices" = [ "Win32_Devices" ]; + "Win32_Devices_Properties" = [ "Win32_Devices" ]; + "Win32_Devices_Pwm" = [ "Win32_Devices" ]; + "Win32_Devices_Sensors" = [ "Win32_Devices" ]; + "Win32_Devices_SerialCommunication" = [ "Win32_Devices" ]; + "Win32_Devices_Tapi" = [ "Win32_Devices" ]; + "Win32_Devices_Usb" = [ "Win32_Devices" ]; + "Win32_Devices_WebServicesOnDevices" = [ "Win32_Devices" ]; + "Win32_Foundation" = [ "Win32" ]; + "Win32_Gaming" = [ "Win32" ]; + "Win32_Globalization" = [ "Win32" ]; + "Win32_Graphics" = [ "Win32" ]; + "Win32_Graphics_Dwm" = [ "Win32_Graphics" ]; + "Win32_Graphics_Gdi" = [ "Win32_Graphics" ]; + "Win32_Graphics_Hlsl" = [ "Win32_Graphics" ]; + "Win32_Graphics_OpenGL" = [ "Win32_Graphics" ]; + "Win32_Graphics_Printing" = [ "Win32_Graphics" ]; + "Win32_Graphics_Printing_PrintTicket" = [ "Win32_Graphics_Printing" ]; + "Win32_Management" = [ "Win32" ]; + "Win32_Management_MobileDeviceManagementRegistration" = [ "Win32_Management" ]; + "Win32_Media" = [ "Win32" ]; + "Win32_Media_Audio" = [ "Win32_Media" ]; + "Win32_Media_Audio_Apo" = [ "Win32_Media_Audio" ]; + "Win32_Media_Audio_DirectMusic" = [ "Win32_Media_Audio" ]; + "Win32_Media_Audio_Endpoints" = [ "Win32_Media_Audio" ]; + "Win32_Media_Audio_XAudio2" = [ "Win32_Media_Audio" ]; + "Win32_Media_DeviceManager" = [ "Win32_Media" ]; + "Win32_Media_DxMediaObjects" = [ "Win32_Media" ]; + "Win32_Media_KernelStreaming" = [ "Win32_Media" ]; + "Win32_Media_LibrarySharingServices" = [ "Win32_Media" ]; + "Win32_Media_MediaPlayer" = [ "Win32_Media" ]; + "Win32_Media_Multimedia" = [ "Win32_Media" ]; + "Win32_Media_Speech" = [ "Win32_Media" ]; + "Win32_Media_Streaming" = [ "Win32_Media" ]; + "Win32_Media_WindowsMediaFormat" = [ "Win32_Media" ]; + "Win32_NetworkManagement" = [ "Win32" ]; + "Win32_NetworkManagement_Dhcp" = [ "Win32_NetworkManagement" ]; + "Win32_NetworkManagement_Dns" = [ "Win32_NetworkManagement" ]; + "Win32_NetworkManagement_InternetConnectionWizard" = [ "Win32_NetworkManagement" ]; + "Win32_NetworkManagement_IpHelper" = [ "Win32_NetworkManagement" ]; + "Win32_NetworkManagement_MobileBroadband" = [ "Win32_NetworkManagement" ]; + "Win32_NetworkManagement_Multicast" = [ "Win32_NetworkManagement" ]; + "Win32_NetworkManagement_Ndis" = [ "Win32_NetworkManagement" ]; + "Win32_NetworkManagement_NetBios" = [ "Win32_NetworkManagement" ]; + "Win32_NetworkManagement_NetManagement" = [ "Win32_NetworkManagement" ]; + "Win32_NetworkManagement_NetShell" = [ "Win32_NetworkManagement" ]; + "Win32_NetworkManagement_NetworkDiagnosticsFramework" = [ "Win32_NetworkManagement" ]; + "Win32_NetworkManagement_NetworkPolicyServer" = [ "Win32_NetworkManagement" ]; + "Win32_NetworkManagement_P2P" = [ "Win32_NetworkManagement" ]; + "Win32_NetworkManagement_QoS" = [ "Win32_NetworkManagement" ]; + "Win32_NetworkManagement_Rras" = [ "Win32_NetworkManagement" ]; + "Win32_NetworkManagement_Snmp" = [ "Win32_NetworkManagement" ]; + "Win32_NetworkManagement_WNet" = [ "Win32_NetworkManagement" ]; + "Win32_NetworkManagement_WebDav" = [ "Win32_NetworkManagement" ]; + "Win32_NetworkManagement_WiFi" = [ "Win32_NetworkManagement" ]; + "Win32_NetworkManagement_WindowsConnectNow" = [ "Win32_NetworkManagement" ]; + "Win32_NetworkManagement_WindowsConnectionManager" = [ "Win32_NetworkManagement" ]; + "Win32_NetworkManagement_WindowsFilteringPlatform" = [ 
"Win32_NetworkManagement" ]; + "Win32_NetworkManagement_WindowsFirewall" = [ "Win32_NetworkManagement" ]; + "Win32_NetworkManagement_WindowsNetworkVirtualization" = [ "Win32_NetworkManagement" ]; + "Win32_Networking" = [ "Win32" ]; + "Win32_Networking_ActiveDirectory" = [ "Win32_Networking" ]; + "Win32_Networking_BackgroundIntelligentTransferService" = [ "Win32_Networking" ]; + "Win32_Networking_Clustering" = [ "Win32_Networking" ]; + "Win32_Networking_HttpServer" = [ "Win32_Networking" ]; + "Win32_Networking_Ldap" = [ "Win32_Networking" ]; + "Win32_Networking_NetworkListManager" = [ "Win32_Networking" ]; + "Win32_Networking_RemoteDifferentialCompression" = [ "Win32_Networking" ]; + "Win32_Networking_WebSocket" = [ "Win32_Networking" ]; + "Win32_Networking_WinHttp" = [ "Win32_Networking" ]; + "Win32_Networking_WinInet" = [ "Win32_Networking" ]; + "Win32_Networking_WinSock" = [ "Win32_Networking" ]; + "Win32_Networking_WindowsWebServices" = [ "Win32_Networking" ]; + "Win32_Security" = [ "Win32" ]; + "Win32_Security_AppLocker" = [ "Win32_Security" ]; + "Win32_Security_Authentication" = [ "Win32_Security" ]; + "Win32_Security_Authentication_Identity" = [ "Win32_Security_Authentication" ]; + "Win32_Security_Authentication_Identity_Provider" = [ "Win32_Security_Authentication_Identity" ]; + "Win32_Security_Authorization" = [ "Win32_Security" ]; + "Win32_Security_Authorization_UI" = [ "Win32_Security_Authorization" ]; + "Win32_Security_ConfigurationSnapin" = [ "Win32_Security" ]; + "Win32_Security_Credentials" = [ "Win32_Security" ]; + "Win32_Security_Cryptography" = [ "Win32_Security" ]; + "Win32_Security_Cryptography_Catalog" = [ "Win32_Security_Cryptography" ]; + "Win32_Security_Cryptography_Certificates" = [ "Win32_Security_Cryptography" ]; + "Win32_Security_Cryptography_Sip" = [ "Win32_Security_Cryptography" ]; + "Win32_Security_Cryptography_UI" = [ "Win32_Security_Cryptography" ]; + "Win32_Security_DiagnosticDataQuery" = [ "Win32_Security" ]; + "Win32_Security_DirectoryServices" = [ "Win32_Security" ]; + "Win32_Security_EnterpriseData" = [ "Win32_Security" ]; + "Win32_Security_ExtensibleAuthenticationProtocol" = [ "Win32_Security" ]; + "Win32_Security_Isolation" = [ "Win32_Security" ]; + "Win32_Security_LicenseProtection" = [ "Win32_Security" ]; + "Win32_Security_NetworkAccessProtection" = [ "Win32_Security" ]; + "Win32_Security_Tpm" = [ "Win32_Security" ]; + "Win32_Security_WinTrust" = [ "Win32_Security" ]; + "Win32_Security_WinWlx" = [ "Win32_Security" ]; + "Win32_Storage" = [ "Win32" ]; + "Win32_Storage_Cabinets" = [ "Win32_Storage" ]; + "Win32_Storage_CloudFilters" = [ "Win32_Storage" ]; + "Win32_Storage_Compression" = [ "Win32_Storage" ]; + "Win32_Storage_DataDeduplication" = [ "Win32_Storage" ]; + "Win32_Storage_DistributedFileSystem" = [ "Win32_Storage" ]; + "Win32_Storage_EnhancedStorage" = [ "Win32_Storage" ]; + "Win32_Storage_FileHistory" = [ "Win32_Storage" ]; + "Win32_Storage_FileServerResourceManager" = [ "Win32_Storage" ]; + "Win32_Storage_FileSystem" = [ "Win32_Storage" ]; + "Win32_Storage_Imapi" = [ "Win32_Storage" ]; + "Win32_Storage_IndexServer" = [ "Win32_Storage" ]; + "Win32_Storage_InstallableFileSystems" = [ "Win32_Storage" ]; + "Win32_Storage_IscsiDisc" = [ "Win32_Storage" ]; + "Win32_Storage_Jet" = [ "Win32_Storage" ]; + "Win32_Storage_OfflineFiles" = [ "Win32_Storage" ]; + "Win32_Storage_OperationRecorder" = [ "Win32_Storage" ]; + "Win32_Storage_Packaging" = [ "Win32_Storage" ]; + "Win32_Storage_Packaging_Appx" = [ "Win32_Storage_Packaging" ]; + 
"Win32_Storage_Packaging_Opc" = [ "Win32_Storage_Packaging" ]; + "Win32_Storage_ProjectedFileSystem" = [ "Win32_Storage" ]; + "Win32_Storage_StructuredStorage" = [ "Win32_Storage" ]; + "Win32_Storage_Vhd" = [ "Win32_Storage" ]; + "Win32_Storage_VirtualDiskService" = [ "Win32_Storage" ]; + "Win32_Storage_Vss" = [ "Win32_Storage" ]; + "Win32_Storage_Xps" = [ "Win32_Storage" ]; + "Win32_Storage_Xps_Printing" = [ "Win32_Storage_Xps" ]; + "Win32_System" = [ "Win32" ]; + "Win32_System_AddressBook" = [ "Win32_System" ]; + "Win32_System_Antimalware" = [ "Win32_System" ]; + "Win32_System_ApplicationInstallationAndServicing" = [ "Win32_System" ]; + "Win32_System_ApplicationVerifier" = [ "Win32_System" ]; + "Win32_System_AssessmentTool" = [ "Win32_System" ]; + "Win32_System_Com" = [ "Win32_System" ]; + "Win32_System_Com_CallObj" = [ "Win32_System_Com" ]; + "Win32_System_Com_ChannelCredentials" = [ "Win32_System_Com" ]; + "Win32_System_Com_Events" = [ "Win32_System_Com" ]; + "Win32_System_Com_Marshal" = [ "Win32_System_Com" ]; + "Win32_System_Com_StructuredStorage" = [ "Win32_System_Com" ]; + "Win32_System_Com_UI" = [ "Win32_System_Com" ]; + "Win32_System_Com_Urlmon" = [ "Win32_System_Com" ]; + "Win32_System_ComponentServices" = [ "Win32_System" ]; + "Win32_System_Console" = [ "Win32_System" ]; + "Win32_System_Contacts" = [ "Win32_System" ]; + "Win32_System_CorrelationVector" = [ "Win32_System" ]; + "Win32_System_DataExchange" = [ "Win32_System" ]; + "Win32_System_DeploymentServices" = [ "Win32_System" ]; + "Win32_System_DesktopSharing" = [ "Win32_System" ]; + "Win32_System_DeveloperLicensing" = [ "Win32_System" ]; + "Win32_System_Diagnostics" = [ "Win32_System" ]; + "Win32_System_Diagnostics_Ceip" = [ "Win32_System_Diagnostics" ]; + "Win32_System_Diagnostics_Debug" = [ "Win32_System_Diagnostics" ]; + "Win32_System_Diagnostics_Etw" = [ "Win32_System_Diagnostics" ]; + "Win32_System_Diagnostics_ProcessSnapshotting" = [ "Win32_System_Diagnostics" ]; + "Win32_System_Diagnostics_ToolHelp" = [ "Win32_System_Diagnostics" ]; + "Win32_System_DistributedTransactionCoordinator" = [ "Win32_System" ]; + "Win32_System_Environment" = [ "Win32_System" ]; + "Win32_System_ErrorReporting" = [ "Win32_System" ]; + "Win32_System_EventCollector" = [ "Win32_System" ]; + "Win32_System_EventLog" = [ "Win32_System" ]; + "Win32_System_EventNotificationService" = [ "Win32_System" ]; + "Win32_System_GroupPolicy" = [ "Win32_System" ]; + "Win32_System_HostCompute" = [ "Win32_System" ]; + "Win32_System_HostComputeNetwork" = [ "Win32_System" ]; + "Win32_System_HostComputeSystem" = [ "Win32_System" ]; + "Win32_System_Hypervisor" = [ "Win32_System" ]; + "Win32_System_IO" = [ "Win32_System" ]; + "Win32_System_Iis" = [ "Win32_System" ]; + "Win32_System_Ioctl" = [ "Win32_System" ]; + "Win32_System_JobObjects" = [ "Win32_System" ]; + "Win32_System_Js" = [ "Win32_System" ]; + "Win32_System_Kernel" = [ "Win32_System" ]; + "Win32_System_LibraryLoader" = [ "Win32_System" ]; + "Win32_System_Mailslots" = [ "Win32_System" ]; + "Win32_System_Mapi" = [ "Win32_System" ]; + "Win32_System_Memory" = [ "Win32_System" ]; + "Win32_System_Memory_NonVolatile" = [ "Win32_System_Memory" ]; + "Win32_System_MessageQueuing" = [ "Win32_System" ]; + "Win32_System_MixedReality" = [ "Win32_System" ]; + "Win32_System_Mmc" = [ "Win32_System" ]; + "Win32_System_Ole" = [ "Win32_System" ]; + "Win32_System_ParentalControls" = [ "Win32_System" ]; + "Win32_System_PasswordManagement" = [ "Win32_System" ]; + "Win32_System_Performance" = [ "Win32_System" ]; + 
"Win32_System_Performance_HardwareCounterProfiling" = [ "Win32_System_Performance" ]; + "Win32_System_Pipes" = [ "Win32_System" ]; + "Win32_System_Power" = [ "Win32_System" ]; + "Win32_System_ProcessStatus" = [ "Win32_System" ]; + "Win32_System_RealTimeCommunications" = [ "Win32_System" ]; + "Win32_System_Recovery" = [ "Win32_System" ]; + "Win32_System_Registry" = [ "Win32_System" ]; + "Win32_System_RemoteAssistance" = [ "Win32_System" ]; + "Win32_System_RemoteDesktop" = [ "Win32_System" ]; + "Win32_System_RemoteManagement" = [ "Win32_System" ]; + "Win32_System_RestartManager" = [ "Win32_System" ]; + "Win32_System_Restore" = [ "Win32_System" ]; + "Win32_System_Rpc" = [ "Win32_System" ]; + "Win32_System_Search" = [ "Win32_System" ]; + "Win32_System_Search_Common" = [ "Win32_System_Search" ]; + "Win32_System_SecurityCenter" = [ "Win32_System" ]; + "Win32_System_ServerBackup" = [ "Win32_System" ]; + "Win32_System_Services" = [ "Win32_System" ]; + "Win32_System_SettingsManagementInfrastructure" = [ "Win32_System" ]; + "Win32_System_SetupAndMigration" = [ "Win32_System" ]; + "Win32_System_Shutdown" = [ "Win32_System" ]; + "Win32_System_StationsAndDesktops" = [ "Win32_System" ]; + "Win32_System_SubsystemForLinux" = [ "Win32_System" ]; + "Win32_System_SystemInformation" = [ "Win32_System" ]; + "Win32_System_SystemServices" = [ "Win32_System" ]; + "Win32_System_TaskScheduler" = [ "Win32_System" ]; + "Win32_System_Threading" = [ "Win32_System" ]; + "Win32_System_Time" = [ "Win32_System" ]; + "Win32_System_TpmBaseServices" = [ "Win32_System" ]; + "Win32_System_UpdateAgent" = [ "Win32_System" ]; + "Win32_System_UpdateAssessment" = [ "Win32_System" ]; + "Win32_System_UserAccessLogging" = [ "Win32_System" ]; + "Win32_System_VirtualDosMachines" = [ "Win32_System" ]; + "Win32_System_WindowsProgramming" = [ "Win32_System" ]; + "Win32_System_WindowsSync" = [ "Win32_System" ]; + "Win32_System_Wmi" = [ "Win32_System" ]; + "Win32_UI" = [ "Win32" ]; + "Win32_UI_Accessibility" = [ "Win32_UI" ]; + "Win32_UI_Animation" = [ "Win32_UI" ]; + "Win32_UI_ColorSystem" = [ "Win32_UI" ]; + "Win32_UI_Controls" = [ "Win32_UI" ]; + "Win32_UI_Controls_Dialogs" = [ "Win32_UI_Controls" ]; + "Win32_UI_Controls_RichEdit" = [ "Win32_UI_Controls" ]; + "Win32_UI_HiDpi" = [ "Win32_UI" ]; + "Win32_UI_Input" = [ "Win32_UI" ]; + "Win32_UI_Input_Ime" = [ "Win32_UI_Input" ]; + "Win32_UI_Input_Ink" = [ "Win32_UI_Input" ]; + "Win32_UI_Input_KeyboardAndMouse" = [ "Win32_UI_Input" ]; + "Win32_UI_Input_Pointer" = [ "Win32_UI_Input" ]; + "Win32_UI_Input_Radial" = [ "Win32_UI_Input" ]; + "Win32_UI_Input_Touch" = [ "Win32_UI_Input" ]; + "Win32_UI_Input_XboxController" = [ "Win32_UI_Input" ]; + "Win32_UI_InteractionContext" = [ "Win32_UI" ]; + "Win32_UI_LegacyWindowsEnvironmentFeatures" = [ "Win32_UI" ]; + "Win32_UI_Magnification" = [ "Win32_UI" ]; + "Win32_UI_Notifications" = [ "Win32_UI" ]; + "Win32_UI_Ribbon" = [ "Win32_UI" ]; + "Win32_UI_Shell" = [ "Win32_UI" ]; + "Win32_UI_Shell_Common" = [ "Win32_UI_Shell" ]; + "Win32_UI_Shell_PropertiesSystem" = [ "Win32_UI_Shell" ]; + "Win32_UI_TabletPC" = [ "Win32_UI" ]; + "Win32_UI_TextServices" = [ "Win32_UI" ]; + "Win32_UI_WindowsAndMessaging" = [ "Win32_UI" ]; + "Win32_UI_Wpf" = [ "Win32_UI" ]; + }; + resolvedDefaultFeatures = [ "Win32" "Win32_Foundation" "Win32_Networking" "Win32_Networking_WinSock" "Win32_Security" "Win32_Storage" "Win32_Storage_FileSystem" "Win32_System" "Win32_System_IO" "Win32_System_Pipes" "Win32_System_WindowsProgramming" "default" ]; + }; + "windows-sys 0.48.0" = rec { + 
crateName = "windows-sys"; + version = "0.48.0"; + edition = "2018"; + sha256 = "1aan23v5gs7gya1lc46hqn9mdh8yph3fhxmhxlw36pn6pqc28zb7"; + authors = [ + "Microsoft" + ]; + dependencies = [ + { + name = "windows-targets"; + packageId = "windows-targets 0.48.0"; + } + ]; + features = { + "Wdk_System" = [ "Wdk" ]; + "Wdk_System_OfflineRegistry" = [ "Wdk_System" ]; + "Win32_Data" = [ "Win32" ]; + "Win32_Data_HtmlHelp" = [ "Win32_Data" ]; + "Win32_Data_RightsManagement" = [ "Win32_Data" ]; + "Win32_Data_Xml" = [ "Win32_Data" ]; + "Win32_Data_Xml_MsXml" = [ "Win32_Data_Xml" ]; + "Win32_Data_Xml_XmlLite" = [ "Win32_Data_Xml" ]; + "Win32_Devices" = [ "Win32" ]; + "Win32_Devices_AllJoyn" = [ "Win32_Devices" ]; + "Win32_Devices_BiometricFramework" = [ "Win32_Devices" ]; + "Win32_Devices_Bluetooth" = [ "Win32_Devices" ]; + "Win32_Devices_Communication" = [ "Win32_Devices" ]; + "Win32_Devices_DeviceAccess" = [ "Win32_Devices" ]; + "Win32_Devices_DeviceAndDriverInstallation" = [ "Win32_Devices" ]; + "Win32_Devices_DeviceQuery" = [ "Win32_Devices" ]; + "Win32_Devices_Display" = [ "Win32_Devices" ]; + "Win32_Devices_Enumeration" = [ "Win32_Devices" ]; + "Win32_Devices_Enumeration_Pnp" = [ "Win32_Devices_Enumeration" ]; + "Win32_Devices_Fax" = [ "Win32_Devices" ]; + "Win32_Devices_FunctionDiscovery" = [ "Win32_Devices" ]; + "Win32_Devices_Geolocation" = [ "Win32_Devices" ]; + "Win32_Devices_HumanInterfaceDevice" = [ "Win32_Devices" ]; + "Win32_Devices_ImageAcquisition" = [ "Win32_Devices" ]; + "Win32_Devices_PortableDevices" = [ "Win32_Devices" ]; + "Win32_Devices_Properties" = [ "Win32_Devices" ]; + "Win32_Devices_Pwm" = [ "Win32_Devices" ]; + "Win32_Devices_Sensors" = [ "Win32_Devices" ]; + "Win32_Devices_SerialCommunication" = [ "Win32_Devices" ]; + "Win32_Devices_Tapi" = [ "Win32_Devices" ]; + "Win32_Devices_Usb" = [ "Win32_Devices" ]; + "Win32_Devices_WebServicesOnDevices" = [ "Win32_Devices" ]; + "Win32_Foundation" = [ "Win32" ]; + "Win32_Gaming" = [ "Win32" ]; + "Win32_Globalization" = [ "Win32" ]; + "Win32_Graphics" = [ "Win32" ]; + "Win32_Graphics_Dwm" = [ "Win32_Graphics" ]; + "Win32_Graphics_Gdi" = [ "Win32_Graphics" ]; + "Win32_Graphics_Hlsl" = [ "Win32_Graphics" ]; + "Win32_Graphics_OpenGL" = [ "Win32_Graphics" ]; + "Win32_Graphics_Printing" = [ "Win32_Graphics" ]; + "Win32_Graphics_Printing_PrintTicket" = [ "Win32_Graphics_Printing" ]; + "Win32_Management" = [ "Win32" ]; + "Win32_Management_MobileDeviceManagementRegistration" = [ "Win32_Management" ]; + "Win32_Media" = [ "Win32" ]; + "Win32_Media_Audio" = [ "Win32_Media" ]; + "Win32_Media_Audio_Apo" = [ "Win32_Media_Audio" ]; + "Win32_Media_Audio_DirectMusic" = [ "Win32_Media_Audio" ]; + "Win32_Media_Audio_Endpoints" = [ "Win32_Media_Audio" ]; + "Win32_Media_Audio_XAudio2" = [ "Win32_Media_Audio" ]; + "Win32_Media_DeviceManager" = [ "Win32_Media" ]; + "Win32_Media_DxMediaObjects" = [ "Win32_Media" ]; + "Win32_Media_KernelStreaming" = [ "Win32_Media" ]; + "Win32_Media_LibrarySharingServices" = [ "Win32_Media" ]; + "Win32_Media_MediaPlayer" = [ "Win32_Media" ]; + "Win32_Media_Multimedia" = [ "Win32_Media" ]; + "Win32_Media_Speech" = [ "Win32_Media" ]; + "Win32_Media_Streaming" = [ "Win32_Media" ]; + "Win32_Media_WindowsMediaFormat" = [ "Win32_Media" ]; + "Win32_NetworkManagement" = [ "Win32" ]; + "Win32_NetworkManagement_Dhcp" = [ "Win32_NetworkManagement" ]; + "Win32_NetworkManagement_Dns" = [ "Win32_NetworkManagement" ]; + "Win32_NetworkManagement_InternetConnectionWizard" = [ "Win32_NetworkManagement" ]; + "Win32_NetworkManagement_IpHelper" 
= [ "Win32_NetworkManagement" ]; + "Win32_NetworkManagement_MobileBroadband" = [ "Win32_NetworkManagement" ]; + "Win32_NetworkManagement_Multicast" = [ "Win32_NetworkManagement" ]; + "Win32_NetworkManagement_Ndis" = [ "Win32_NetworkManagement" ]; + "Win32_NetworkManagement_NetBios" = [ "Win32_NetworkManagement" ]; + "Win32_NetworkManagement_NetManagement" = [ "Win32_NetworkManagement" ]; + "Win32_NetworkManagement_NetShell" = [ "Win32_NetworkManagement" ]; + "Win32_NetworkManagement_NetworkDiagnosticsFramework" = [ "Win32_NetworkManagement" ]; + "Win32_NetworkManagement_NetworkPolicyServer" = [ "Win32_NetworkManagement" ]; + "Win32_NetworkManagement_P2P" = [ "Win32_NetworkManagement" ]; + "Win32_NetworkManagement_QoS" = [ "Win32_NetworkManagement" ]; + "Win32_NetworkManagement_Rras" = [ "Win32_NetworkManagement" ]; + "Win32_NetworkManagement_Snmp" = [ "Win32_NetworkManagement" ]; + "Win32_NetworkManagement_WNet" = [ "Win32_NetworkManagement" ]; + "Win32_NetworkManagement_WebDav" = [ "Win32_NetworkManagement" ]; + "Win32_NetworkManagement_WiFi" = [ "Win32_NetworkManagement" ]; + "Win32_NetworkManagement_WindowsConnectNow" = [ "Win32_NetworkManagement" ]; + "Win32_NetworkManagement_WindowsConnectionManager" = [ "Win32_NetworkManagement" ]; + "Win32_NetworkManagement_WindowsFilteringPlatform" = [ "Win32_NetworkManagement" ]; + "Win32_NetworkManagement_WindowsFirewall" = [ "Win32_NetworkManagement" ]; + "Win32_NetworkManagement_WindowsNetworkVirtualization" = [ "Win32_NetworkManagement" ]; + "Win32_Networking" = [ "Win32" ]; + "Win32_Networking_ActiveDirectory" = [ "Win32_Networking" ]; + "Win32_Networking_BackgroundIntelligentTransferService" = [ "Win32_Networking" ]; + "Win32_Networking_Clustering" = [ "Win32_Networking" ]; + "Win32_Networking_HttpServer" = [ "Win32_Networking" ]; + "Win32_Networking_Ldap" = [ "Win32_Networking" ]; + "Win32_Networking_NetworkListManager" = [ "Win32_Networking" ]; + "Win32_Networking_RemoteDifferentialCompression" = [ "Win32_Networking" ]; + "Win32_Networking_WebSocket" = [ "Win32_Networking" ]; + "Win32_Networking_WinHttp" = [ "Win32_Networking" ]; + "Win32_Networking_WinInet" = [ "Win32_Networking" ]; + "Win32_Networking_WinSock" = [ "Win32_Networking" ]; + "Win32_Networking_WindowsWebServices" = [ "Win32_Networking" ]; + "Win32_Security" = [ "Win32" ]; + "Win32_Security_AppLocker" = [ "Win32_Security" ]; + "Win32_Security_Authentication" = [ "Win32_Security" ]; + "Win32_Security_Authentication_Identity" = [ "Win32_Security_Authentication" ]; + "Win32_Security_Authentication_Identity_Provider" = [ "Win32_Security_Authentication_Identity" ]; + "Win32_Security_Authorization" = [ "Win32_Security" ]; + "Win32_Security_Authorization_UI" = [ "Win32_Security_Authorization" ]; + "Win32_Security_ConfigurationSnapin" = [ "Win32_Security" ]; + "Win32_Security_Credentials" = [ "Win32_Security" ]; + "Win32_Security_Cryptography" = [ "Win32_Security" ]; + "Win32_Security_Cryptography_Catalog" = [ "Win32_Security_Cryptography" ]; + "Win32_Security_Cryptography_Certificates" = [ "Win32_Security_Cryptography" ]; + "Win32_Security_Cryptography_Sip" = [ "Win32_Security_Cryptography" ]; + "Win32_Security_Cryptography_UI" = [ "Win32_Security_Cryptography" ]; + "Win32_Security_DiagnosticDataQuery" = [ "Win32_Security" ]; + "Win32_Security_DirectoryServices" = [ "Win32_Security" ]; + "Win32_Security_EnterpriseData" = [ "Win32_Security" ]; + "Win32_Security_ExtensibleAuthenticationProtocol" = [ "Win32_Security" ]; + "Win32_Security_Isolation" = [ "Win32_Security" ]; + 
"Win32_Security_LicenseProtection" = [ "Win32_Security" ]; + "Win32_Security_NetworkAccessProtection" = [ "Win32_Security" ]; + "Win32_Security_Tpm" = [ "Win32_Security" ]; + "Win32_Security_WinTrust" = [ "Win32_Security" ]; + "Win32_Security_WinWlx" = [ "Win32_Security" ]; + "Win32_Storage" = [ "Win32" ]; + "Win32_Storage_Cabinets" = [ "Win32_Storage" ]; + "Win32_Storage_CloudFilters" = [ "Win32_Storage" ]; + "Win32_Storage_Compression" = [ "Win32_Storage" ]; + "Win32_Storage_DataDeduplication" = [ "Win32_Storage" ]; + "Win32_Storage_DistributedFileSystem" = [ "Win32_Storage" ]; + "Win32_Storage_EnhancedStorage" = [ "Win32_Storage" ]; + "Win32_Storage_FileHistory" = [ "Win32_Storage" ]; + "Win32_Storage_FileServerResourceManager" = [ "Win32_Storage" ]; + "Win32_Storage_FileSystem" = [ "Win32_Storage" ]; + "Win32_Storage_Imapi" = [ "Win32_Storage" ]; + "Win32_Storage_IndexServer" = [ "Win32_Storage" ]; + "Win32_Storage_InstallableFileSystems" = [ "Win32_Storage" ]; + "Win32_Storage_IscsiDisc" = [ "Win32_Storage" ]; + "Win32_Storage_Jet" = [ "Win32_Storage" ]; + "Win32_Storage_OfflineFiles" = [ "Win32_Storage" ]; + "Win32_Storage_OperationRecorder" = [ "Win32_Storage" ]; + "Win32_Storage_Packaging" = [ "Win32_Storage" ]; + "Win32_Storage_Packaging_Appx" = [ "Win32_Storage_Packaging" ]; + "Win32_Storage_Packaging_Opc" = [ "Win32_Storage_Packaging" ]; + "Win32_Storage_ProjectedFileSystem" = [ "Win32_Storage" ]; + "Win32_Storage_StructuredStorage" = [ "Win32_Storage" ]; + "Win32_Storage_Vhd" = [ "Win32_Storage" ]; + "Win32_Storage_VirtualDiskService" = [ "Win32_Storage" ]; + "Win32_Storage_Vss" = [ "Win32_Storage" ]; + "Win32_Storage_Xps" = [ "Win32_Storage" ]; + "Win32_Storage_Xps_Printing" = [ "Win32_Storage_Xps" ]; + "Win32_System" = [ "Win32" ]; + "Win32_System_AddressBook" = [ "Win32_System" ]; + "Win32_System_Antimalware" = [ "Win32_System" ]; + "Win32_System_ApplicationInstallationAndServicing" = [ "Win32_System" ]; + "Win32_System_ApplicationVerifier" = [ "Win32_System" ]; + "Win32_System_AssessmentTool" = [ "Win32_System" ]; + "Win32_System_ClrHosting" = [ "Win32_System" ]; + "Win32_System_Com" = [ "Win32_System" ]; + "Win32_System_Com_CallObj" = [ "Win32_System_Com" ]; + "Win32_System_Com_ChannelCredentials" = [ "Win32_System_Com" ]; + "Win32_System_Com_Events" = [ "Win32_System_Com" ]; + "Win32_System_Com_Marshal" = [ "Win32_System_Com" ]; + "Win32_System_Com_StructuredStorage" = [ "Win32_System_Com" ]; + "Win32_System_Com_UI" = [ "Win32_System_Com" ]; + "Win32_System_Com_Urlmon" = [ "Win32_System_Com" ]; + "Win32_System_ComponentServices" = [ "Win32_System" ]; + "Win32_System_Console" = [ "Win32_System" ]; + "Win32_System_Contacts" = [ "Win32_System" ]; + "Win32_System_CorrelationVector" = [ "Win32_System" ]; + "Win32_System_DataExchange" = [ "Win32_System" ]; + "Win32_System_DeploymentServices" = [ "Win32_System" ]; + "Win32_System_DesktopSharing" = [ "Win32_System" ]; + "Win32_System_DeveloperLicensing" = [ "Win32_System" ]; + "Win32_System_Diagnostics" = [ "Win32_System" ]; + "Win32_System_Diagnostics_Ceip" = [ "Win32_System_Diagnostics" ]; + "Win32_System_Diagnostics_ClrProfiling" = [ "Win32_System_Diagnostics" ]; + "Win32_System_Diagnostics_Debug" = [ "Win32_System_Diagnostics" ]; + "Win32_System_Diagnostics_Debug_ActiveScript" = [ "Win32_System_Diagnostics_Debug" ]; + "Win32_System_Diagnostics_Debug_Extensions" = [ "Win32_System_Diagnostics_Debug" ]; + "Win32_System_Diagnostics_Etw" = [ "Win32_System_Diagnostics" ]; + "Win32_System_Diagnostics_ProcessSnapshotting" = [ 
"Win32_System_Diagnostics" ]; + "Win32_System_Diagnostics_ToolHelp" = [ "Win32_System_Diagnostics" ]; + "Win32_System_DistributedTransactionCoordinator" = [ "Win32_System" ]; + "Win32_System_Environment" = [ "Win32_System" ]; + "Win32_System_ErrorReporting" = [ "Win32_System" ]; + "Win32_System_EventCollector" = [ "Win32_System" ]; + "Win32_System_EventLog" = [ "Win32_System" ]; + "Win32_System_EventNotificationService" = [ "Win32_System" ]; + "Win32_System_GroupPolicy" = [ "Win32_System" ]; + "Win32_System_HostCompute" = [ "Win32_System" ]; + "Win32_System_HostComputeNetwork" = [ "Win32_System" ]; + "Win32_System_HostComputeSystem" = [ "Win32_System" ]; + "Win32_System_Hypervisor" = [ "Win32_System" ]; + "Win32_System_IO" = [ "Win32_System" ]; + "Win32_System_Iis" = [ "Win32_System" ]; + "Win32_System_Ioctl" = [ "Win32_System" ]; + "Win32_System_JobObjects" = [ "Win32_System" ]; + "Win32_System_Js" = [ "Win32_System" ]; + "Win32_System_Kernel" = [ "Win32_System" ]; + "Win32_System_LibraryLoader" = [ "Win32_System" ]; + "Win32_System_Mailslots" = [ "Win32_System" ]; + "Win32_System_Mapi" = [ "Win32_System" ]; + "Win32_System_Memory" = [ "Win32_System" ]; + "Win32_System_Memory_NonVolatile" = [ "Win32_System_Memory" ]; + "Win32_System_MessageQueuing" = [ "Win32_System" ]; + "Win32_System_MixedReality" = [ "Win32_System" ]; + "Win32_System_Mmc" = [ "Win32_System" ]; + "Win32_System_Ole" = [ "Win32_System" ]; + "Win32_System_ParentalControls" = [ "Win32_System" ]; + "Win32_System_PasswordManagement" = [ "Win32_System" ]; + "Win32_System_Performance" = [ "Win32_System" ]; + "Win32_System_Performance_HardwareCounterProfiling" = [ "Win32_System_Performance" ]; + "Win32_System_Pipes" = [ "Win32_System" ]; + "Win32_System_Power" = [ "Win32_System" ]; + "Win32_System_ProcessStatus" = [ "Win32_System" ]; + "Win32_System_RealTimeCommunications" = [ "Win32_System" ]; + "Win32_System_Recovery" = [ "Win32_System" ]; + "Win32_System_Registry" = [ "Win32_System" ]; + "Win32_System_RemoteAssistance" = [ "Win32_System" ]; + "Win32_System_RemoteDesktop" = [ "Win32_System" ]; + "Win32_System_RemoteManagement" = [ "Win32_System" ]; + "Win32_System_RestartManager" = [ "Win32_System" ]; + "Win32_System_Restore" = [ "Win32_System" ]; + "Win32_System_Rpc" = [ "Win32_System" ]; + "Win32_System_Search" = [ "Win32_System" ]; + "Win32_System_Search_Common" = [ "Win32_System_Search" ]; + "Win32_System_SecurityCenter" = [ "Win32_System" ]; + "Win32_System_ServerBackup" = [ "Win32_System" ]; + "Win32_System_Services" = [ "Win32_System" ]; + "Win32_System_SettingsManagementInfrastructure" = [ "Win32_System" ]; + "Win32_System_SetupAndMigration" = [ "Win32_System" ]; + "Win32_System_Shutdown" = [ "Win32_System" ]; + "Win32_System_StationsAndDesktops" = [ "Win32_System" ]; + "Win32_System_SubsystemForLinux" = [ "Win32_System" ]; + "Win32_System_SystemInformation" = [ "Win32_System" ]; + "Win32_System_SystemServices" = [ "Win32_System" ]; + "Win32_System_TaskScheduler" = [ "Win32_System" ]; + "Win32_System_Threading" = [ "Win32_System" ]; + "Win32_System_Time" = [ "Win32_System" ]; + "Win32_System_TpmBaseServices" = [ "Win32_System" ]; + "Win32_System_UpdateAgent" = [ "Win32_System" ]; + "Win32_System_UpdateAssessment" = [ "Win32_System" ]; + "Win32_System_UserAccessLogging" = [ "Win32_System" ]; + "Win32_System_VirtualDosMachines" = [ "Win32_System" ]; + "Win32_System_WindowsProgramming" = [ "Win32_System" ]; + "Win32_System_WindowsSync" = [ "Win32_System" ]; + "Win32_System_Wmi" = [ "Win32_System" ]; + "Win32_UI" = [ 
"Win32" ]; + "Win32_UI_Accessibility" = [ "Win32_UI" ]; + "Win32_UI_Animation" = [ "Win32_UI" ]; + "Win32_UI_ColorSystem" = [ "Win32_UI" ]; + "Win32_UI_Controls" = [ "Win32_UI" ]; + "Win32_UI_Controls_Dialogs" = [ "Win32_UI_Controls" ]; + "Win32_UI_Controls_RichEdit" = [ "Win32_UI_Controls" ]; + "Win32_UI_HiDpi" = [ "Win32_UI" ]; + "Win32_UI_Input" = [ "Win32_UI" ]; + "Win32_UI_Input_Ime" = [ "Win32_UI_Input" ]; + "Win32_UI_Input_Ink" = [ "Win32_UI_Input" ]; + "Win32_UI_Input_KeyboardAndMouse" = [ "Win32_UI_Input" ]; + "Win32_UI_Input_Pointer" = [ "Win32_UI_Input" ]; + "Win32_UI_Input_Radial" = [ "Win32_UI_Input" ]; + "Win32_UI_Input_Touch" = [ "Win32_UI_Input" ]; + "Win32_UI_Input_XboxController" = [ "Win32_UI_Input" ]; + "Win32_UI_InteractionContext" = [ "Win32_UI" ]; + "Win32_UI_LegacyWindowsEnvironmentFeatures" = [ "Win32_UI" ]; + "Win32_UI_Magnification" = [ "Win32_UI" ]; + "Win32_UI_Notifications" = [ "Win32_UI" ]; + "Win32_UI_Ribbon" = [ "Win32_UI" ]; + "Win32_UI_Shell" = [ "Win32_UI" ]; + "Win32_UI_Shell_Common" = [ "Win32_UI_Shell" ]; + "Win32_UI_Shell_PropertiesSystem" = [ "Win32_UI_Shell" ]; + "Win32_UI_TabletPC" = [ "Win32_UI" ]; + "Win32_UI_TextServices" = [ "Win32_UI" ]; + "Win32_UI_WindowsAndMessaging" = [ "Win32_UI" ]; + "Win32_UI_Wpf" = [ "Win32_UI" ]; + "Win32_Web" = [ "Win32" ]; + "Win32_Web_InternetExplorer" = [ "Win32_Web" ]; + }; + resolvedDefaultFeatures = [ "Win32" "Win32_Foundation" "Win32_NetworkManagement" "Win32_NetworkManagement_IpHelper" "Win32_Networking" "Win32_Networking_WinSock" "Win32_Security" "Win32_Security_Authorization" "Win32_Storage" "Win32_Storage_FileSystem" "Win32_System" "Win32_System_Console" "Win32_System_Diagnostics" "Win32_System_Diagnostics_Debug" "Win32_System_IO" "Win32_System_Pipes" "Win32_System_SystemServices" "Win32_System_Threading" "default" ]; + }; + "windows-targets 0.42.2" = rec { + crateName = "windows-targets"; + version = "0.42.2"; + edition = "2018"; + sha256 = "0wfhnib2fisxlx8c507dbmh97kgij4r6kcxdi0f9nk6l1k080lcf"; + authors = [ + "Microsoft" + ]; + dependencies = [ + { + name = "windows_aarch64_gnullvm"; + packageId = "windows_aarch64_gnullvm 0.42.2"; + target = { target, features }: (pkgs.rust.lib.toRustTarget stdenv.hostPlatform == "aarch64-pc-windows-gnullvm"); + } + { + name = "windows_aarch64_msvc"; + packageId = "windows_aarch64_msvc 0.42.2"; + target = { target, features }: (pkgs.rust.lib.toRustTarget stdenv.hostPlatform == "aarch64-pc-windows-msvc"); + } + { + name = "windows_aarch64_msvc"; + packageId = "windows_aarch64_msvc 0.42.2"; + target = { target, features }: (pkgs.rust.lib.toRustTarget stdenv.hostPlatform == "aarch64-uwp-windows-msvc"); + } + { + name = "windows_i686_gnu"; + packageId = "windows_i686_gnu 0.42.2"; + target = { target, features }: (pkgs.rust.lib.toRustTarget stdenv.hostPlatform == "i686-pc-windows-gnu"); + } + { + name = "windows_i686_gnu"; + packageId = "windows_i686_gnu 0.42.2"; + target = { target, features }: (pkgs.rust.lib.toRustTarget stdenv.hostPlatform == "i686-uwp-windows-gnu"); + } + { + name = "windows_i686_msvc"; + packageId = "windows_i686_msvc 0.42.2"; + target = { target, features }: (pkgs.rust.lib.toRustTarget stdenv.hostPlatform == "i686-pc-windows-msvc"); + } + { + name = "windows_i686_msvc"; + packageId = "windows_i686_msvc 0.42.2"; + target = { target, features }: (pkgs.rust.lib.toRustTarget stdenv.hostPlatform == "i686-uwp-windows-msvc"); + } + { + name = "windows_x86_64_gnu"; + packageId = "windows_x86_64_gnu 0.42.2"; + target = { target, features }: 
(pkgs.rust.lib.toRustTarget stdenv.hostPlatform == "x86_64-pc-windows-gnu"); + } + { + name = "windows_x86_64_gnu"; + packageId = "windows_x86_64_gnu 0.42.2"; + target = { target, features }: (pkgs.rust.lib.toRustTarget stdenv.hostPlatform == "x86_64-uwp-windows-gnu"); + } + { + name = "windows_x86_64_gnullvm"; + packageId = "windows_x86_64_gnullvm 0.42.2"; + target = { target, features }: (pkgs.rust.lib.toRustTarget stdenv.hostPlatform == "x86_64-pc-windows-gnullvm"); + } + { + name = "windows_x86_64_msvc"; + packageId = "windows_x86_64_msvc 0.42.2"; + target = { target, features }: (pkgs.rust.lib.toRustTarget stdenv.hostPlatform == "x86_64-pc-windows-msvc"); + } + { + name = "windows_x86_64_msvc"; + packageId = "windows_x86_64_msvc 0.42.2"; + target = { target, features }: (pkgs.rust.lib.toRustTarget stdenv.hostPlatform == "x86_64-uwp-windows-msvc"); + } + ]; + + }; + "windows-targets 0.48.0" = rec { + crateName = "windows-targets"; + version = "0.48.0"; + edition = "2018"; + sha256 = "1mfzg94w0c8h4ya9sva7rra77f3iy1712af9b6bwg03wrpqbc7kv"; + authors = [ + "Microsoft" + ]; + dependencies = [ + { + name = "windows_aarch64_gnullvm"; + packageId = "windows_aarch64_gnullvm 0.48.0"; + target = { target, features }: (("aarch64" == target."arch") && ("gnu" == target."env") && ("llvm" == target."abi") && (!(target."windows_raw_dylib" or false))); + } + { + name = "windows_aarch64_msvc"; + packageId = "windows_aarch64_msvc 0.48.0"; + target = { target, features }: (("aarch64" == target."arch") && ("msvc" == target."env") && (!(target."windows_raw_dylib" or false))); + } + { + name = "windows_i686_gnu"; + packageId = "windows_i686_gnu 0.48.0"; + target = { target, features }: (("x86" == target."arch") && ("gnu" == target."env") && (!(target."windows_raw_dylib" or false))); + } + { + name = "windows_i686_msvc"; + packageId = "windows_i686_msvc 0.48.0"; + target = { target, features }: (("x86" == target."arch") && ("msvc" == target."env") && (!(target."windows_raw_dylib" or false))); + } + { + name = "windows_x86_64_gnu"; + packageId = "windows_x86_64_gnu 0.48.0"; + target = { target, features }: (("x86_64" == target."arch") && ("gnu" == target."env") && (!("llvm" == target."abi")) && (!(target."windows_raw_dylib" or false))); + } + { + name = "windows_x86_64_gnullvm"; + packageId = "windows_x86_64_gnullvm 0.48.0"; + target = { target, features }: (("x86_64" == target."arch") && ("gnu" == target."env") && ("llvm" == target."abi") && (!(target."windows_raw_dylib" or false))); + } + { + name = "windows_x86_64_msvc"; + packageId = "windows_x86_64_msvc 0.48.0"; + target = { target, features }: (("x86_64" == target."arch") && ("msvc" == target."env") && (!(target."windows_raw_dylib" or false))); + } + ]; + + }; + "windows_aarch64_gnullvm 0.42.2" = rec { + crateName = "windows_aarch64_gnullvm"; + version = "0.42.2"; + edition = "2018"; + sha256 = "1y4q0qmvl0lvp7syxvfykafvmwal5hrjb4fmv04bqs0bawc52yjr"; + authors = [ + "Microsoft" + ]; + + }; + "windows_aarch64_gnullvm 0.48.0" = rec { + crateName = "windows_aarch64_gnullvm"; + version = "0.48.0"; + edition = "2018"; + sha256 = "1g71yxi61c410pwzq05ld7si4p9hyx6lf5fkw21sinvr3cp5gbli"; + authors = [ + "Microsoft" + ]; + + }; + "windows_aarch64_msvc 0.42.2" = rec { + crateName = "windows_aarch64_msvc"; + version = "0.42.2"; + edition = "2018"; + sha256 = "0hsdikjl5sa1fva5qskpwlxzpc5q9l909fpl1w6yy1hglrj8i3p0"; + authors = [ + "Microsoft" + ]; + + }; + "windows_aarch64_msvc 0.48.0" = rec { + crateName = "windows_aarch64_msvc"; + version = "0.48.0"; + edition = 
"2018"; + sha256 = "1wvwipchhywcjaw73h998vzachf668fpqccbhrxzrz5xszh2gvxj"; + authors = [ + "Microsoft" + ]; + + }; + "windows_i686_gnu 0.42.2" = rec { + crateName = "windows_i686_gnu"; + version = "0.42.2"; + edition = "2018"; + sha256 = "0kx866dfrby88lqs9v1vgmrkk1z6af9lhaghh5maj7d4imyr47f6"; + authors = [ + "Microsoft" + ]; + + }; + "windows_i686_gnu 0.48.0" = rec { + crateName = "windows_i686_gnu"; + version = "0.48.0"; + edition = "2018"; + sha256 = "0hd2v9kp8fss0rzl83wzhw0s5z8q1b4875m6s1phv0yvlxi1jak2"; + authors = [ + "Microsoft" + ]; + + }; + "windows_i686_msvc 0.42.2" = rec { + crateName = "windows_i686_msvc"; + version = "0.42.2"; + edition = "2018"; + sha256 = "0q0h9m2aq1pygc199pa5jgc952qhcnf0zn688454i7v4xjv41n24"; + authors = [ + "Microsoft" + ]; + + }; + "windows_i686_msvc 0.48.0" = rec { + crateName = "windows_i686_msvc"; + version = "0.48.0"; + edition = "2018"; + sha256 = "004fkyqv3if178xx9ksqc4qqv8sz8n72mpczsr2vy8ffckiwchj5"; + authors = [ + "Microsoft" + ]; + + }; + "windows_x86_64_gnu 0.42.2" = rec { + crateName = "windows_x86_64_gnu"; + version = "0.42.2"; + edition = "2018"; + sha256 = "0dnbf2xnp3xrvy8v9mgs3var4zq9v9yh9kv79035rdgyp2w15scd"; + authors = [ + "Microsoft" + ]; + + }; + "windows_x86_64_gnu 0.48.0" = rec { + crateName = "windows_x86_64_gnu"; + version = "0.48.0"; + edition = "2018"; + sha256 = "1cblz5m6a8q6ha09bz4lz233dnq5sw2hpra06k9cna3n3xk8laya"; + authors = [ + "Microsoft" + ]; + + }; + "windows_x86_64_gnullvm 0.42.2" = rec { + crateName = "windows_x86_64_gnullvm"; + version = "0.42.2"; + edition = "2018"; + sha256 = "18wl9r8qbsl475j39zvawlidp1bsbinliwfymr43fibdld31pm16"; + authors = [ + "Microsoft" + ]; + + }; + "windows_x86_64_gnullvm 0.48.0" = rec { + crateName = "windows_x86_64_gnullvm"; + version = "0.48.0"; + edition = "2018"; + sha256 = "0lxryz3ysx0145bf3i38jkr7f9nxiym8p3syklp8f20yyk0xp5kq"; + authors = [ + "Microsoft" + ]; + + }; + "windows_x86_64_msvc 0.42.2" = rec { + crateName = "windows_x86_64_msvc"; + version = "0.42.2"; + edition = "2018"; + sha256 = "1w5r0q0yzx827d10dpjza2ww0j8iajqhmb54s735hhaj66imvv4s"; + authors = [ + "Microsoft" + ]; + + }; + "windows_x86_64_msvc 0.48.0" = rec { + crateName = "windows_x86_64_msvc"; + version = "0.48.0"; + edition = "2018"; + sha256 = "12ipr1knzj2rwjygyllfi5mkd0ihnbi3r61gag5n2jgyk5bmyl8s"; + authors = [ + "Microsoft" + ]; + + }; + "wu-manber" = rec { + crateName = "wu-manber"; + version = "0.1.0"; + edition = "2015"; + workspace_member = null; + src = pkgs.fetchgit { + url = "https://github.com/tvlfyi/wu-manber.git"; + rev = "e77628cafcf45d41b9e455be86a6b1b9f46b2092"; + sha256 = "02byhfiw41mlgr1c43n2iq6jw5sbyn8l1acv5v71a07h5l18q0cy"; + }; + authors = [ + "Joe Neeman <joeneeman@gmail.com>" + ]; + + }; + "xml-rs" = rec { + crateName = "xml-rs"; + version = "0.8.7"; + edition = "2021"; + crateBin = [ ]; + sha256 = "18nxpxqhqbhxncii03gf56hvhyf4s3icdmlks3v7lznxph2037b9"; + libName = "xml"; + authors = [ + "Vladimir Matveev <vmatveev@citrine.cc>" + ]; + + }; + "yansi" = rec { + crateName = "yansi"; + version = "0.5.1"; + edition = "2015"; + sha256 = "1v4qljgzh73knr7291cgwrf56zrvhmpn837n5n5pypzq1kciq109"; + authors = [ + "Sergio Benitez <sb@sergio.bz>" + ]; + + }; + "zstd" = rec { + crateName = "zstd"; + version = "0.9.2+zstd.1.5.1"; + edition = "2018"; + sha256 = "0m5aik2jy2w1g68i4isa0c3gq9a7avq9abgjfjbc6f60yqdym413"; + authors = [ + "Alexandre Bury <alexandre.bury@gmail.com>" + ]; + dependencies = [ + { + name = "zstd-safe"; + packageId = "zstd-safe"; + usesDefaultFeatures = false; + features = [ "std" ]; + } + 
]; + features = { + "arrays" = [ "zstd-safe/arrays" ]; + "bindgen" = [ "zstd-safe/bindgen" ]; + "debug" = [ "zstd-safe/debug" ]; + "default" = [ "legacy" "arrays" ]; + "experimental" = [ "zstd-safe/experimental" ]; + "legacy" = [ "zstd-safe/legacy" ]; + "no_asm" = [ "zstd-safe/no_asm" ]; + "pkg-config" = [ "zstd-safe/pkg-config" ]; + "thin" = [ "zstd-safe/thin" ]; + "zstdmt" = [ "zstd-safe/zstdmt" ]; + }; + resolvedDefaultFeatures = [ "arrays" "default" "legacy" ]; + }; + "zstd-safe" = rec { + crateName = "zstd-safe"; + version = "4.1.3+zstd.1.5.1"; + edition = "2018"; + sha256 = "0yfvqzzkbj871f2vaikal5rm2gf60p1mdzp3jk3w5hmkkywq37g9"; + authors = [ + "Alexandre Bury <alexandre.bury@gmail.com>" + ]; + dependencies = [ + { + name = "libc"; + packageId = "libc"; + } + { + name = "zstd-sys"; + packageId = "zstd-sys"; + usesDefaultFeatures = false; + } + ]; + features = { + "bindgen" = [ "zstd-sys/bindgen" ]; + "debug" = [ "zstd-sys/debug" ]; + "default" = [ "legacy" "arrays" ]; + "experimental" = [ "zstd-sys/experimental" ]; + "legacy" = [ "zstd-sys/legacy" ]; + "no_asm" = [ "zstd-sys/no_asm" ]; + "pkg-config" = [ "zstd-sys/pkg-config" ]; + "std" = [ "zstd-sys/std" ]; + "thin" = [ "zstd-sys/thin" ]; + "zstdmt" = [ "zstd-sys/zstdmt" ]; + }; + resolvedDefaultFeatures = [ "arrays" "legacy" "std" ]; + }; + "zstd-sys" = rec { + crateName = "zstd-sys"; + version = "1.6.2+zstd.1.5.1"; + edition = "2018"; + sha256 = "17xcr0mw8ps9hlc8m0dzj7yd52lb9r9ic9fbpxa4994yilj2zbrd"; + authors = [ + "Alexandre Bury <alexandre.bury@gmail.com>" + ]; + dependencies = [ + { + name = "libc"; + packageId = "libc"; + } + ]; + buildDependencies = [ + { + name = "cc"; + packageId = "cc"; + features = [ "parallel" ]; + } + ]; + features = { + "bindgen" = [ "dep:bindgen" ]; + "default" = [ "legacy" ]; + "pkg-config" = [ "dep:pkg-config" ]; + }; + resolvedDefaultFeatures = [ "legacy" "std" ]; + }; + }; + + # + # crate2nix/default.nix (excerpt start) + # + + /* Target (platform) data for conditional dependencies. + This corresponds roughly to what buildRustCrate is setting. + */ + makeDefaultTarget = platform: { + unix = platform.isUnix; + windows = platform.isWindows; + fuchsia = true; + test = false; + + /* We are choosing an arbitrary rust version to grab `lib` from, + which is unfortunate, but `lib` has been version-agnostic the + whole time so this is good enough for now. + */ + os = pkgs.rust.lib.toTargetOs platform; + arch = pkgs.rust.lib.toTargetArch platform; + family = pkgs.rust.lib.toTargetFamily platform; + env = "gnu"; + endian = + if platform.parsed.cpu.significantByte.name == "littleEndian" + then "little" else "big"; + pointer_width = toString platform.parsed.cpu.bits; + vendor = platform.parsed.vendor.name; + debug_assertions = false; + }; + + /* Filters common temp files and build files. */ + # TODO(pkolloch): Substitute with gitignore filter + sourceFilter = name: type: + let + baseName = builtins.baseNameOf (builtins.toString name); + in + ! 
( + # Filter out git + baseName == ".gitignore" + || (type == "directory" && baseName == ".git") + + # Filter out build results + || ( + type == "directory" && ( + baseName == "target" + || baseName == "_site" + || baseName == ".sass-cache" + || baseName == ".jekyll-metadata" + || baseName == "build-artifacts" + ) + ) + + # Filter out nix-build result symlinks + || ( + type == "symlink" && lib.hasPrefix "result" baseName + ) + + # Filter out IDE config + || ( + type == "directory" && ( + baseName == ".idea" || baseName == ".vscode" + ) + ) || lib.hasSuffix ".iml" baseName + + # Filter out nix build files + || baseName == "Cargo.nix" + + # Filter out editor backup / swap files. + || lib.hasSuffix "~" baseName + || builtins.match "^\\.sw[a-z]$$" baseName != null + || builtins.match "^\\..*\\.sw[a-z]$$" baseName != null + || lib.hasSuffix ".tmp" baseName + || lib.hasSuffix ".bak" baseName + || baseName == "tests.nix" + ); + + /* Returns a crate which depends on successful test execution + of the crate given as the second argument. + + testCrateFlags: list of flags to pass to the test executable + testInputs: list of packages that should be available during test execution + */ + crateWithTest = { crate, testCrate, testCrateFlags, testInputs, testPreRun, testPostRun }: + assert builtins.typeOf testCrateFlags == "list"; + assert builtins.typeOf testInputs == "list"; + assert builtins.typeOf testPreRun == "string"; + assert builtins.typeOf testPostRun == "string"; + let + # override the `crate` so that it will build and execute tests instead of + # building the actual lib and bin targets. We just have to pass `--test` + # to rustc and it will do the right thing. We execute the tests and copy + # their log and the test executables to $out for later inspection. + test = + let + drv = testCrate.override + ( + _: { + buildTests = true; + } + ); + # If the user hasn't set any pre/post commands, we don't want to + # insert empty lines. This means that any existing users of crate2nix + # don't get a spurious rebuild unless they set these explicitly. + testCommand = pkgs.lib.concatStringsSep "\n" + (pkgs.lib.filter (s: s != "") [ + testPreRun + "$f $testCrateFlags 2>&1 | tee -a $out" + testPostRun + ]); + in + pkgs.runCommand "run-tests-${testCrate.name}" + { + inherit testCrateFlags; + buildInputs = testInputs; + } '' + set -ex + + export RUST_BACKTRACE=1 + + # recreate a file hierarchy as when running tests with cargo + + # the source for test data + ${pkgs.xorg.lndir}/bin/lndir ${crate.src} + + # build outputs + testRoot=target/debug + mkdir -p $testRoot + + # executables of the crate + # we copy to prevent std::env::current_exe() from resolving to a store location + for i in ${crate}/bin/*; do + cp "$i" "$testRoot" + done + chmod +w -R . + + # test harness executables are suffixed with a hash, like cargo does + # this prevents name collisions with the main + # executables of the crate + hash=$(basename $out) + for file in ${drv}/tests/*; do + f=$testRoot/$(basename $file)-$hash + cp $file $f + ${testCommand} + done + ''; + in + pkgs.runCommand "${crate.name}-linked" + { + inherit (crate) outputs crateName; + passthru = (crate.passthru or { }) // { + inherit test; + }; + } '' + echo tested by ${test} + ${lib.concatMapStringsSep "\n" (output: "ln -s ${crate.${output}} ${"$"}${output}") crate.outputs} + ''; + + /* A restricted overridable version of builtRustCratesWithFeatures. */ + buildRustCrateWithFeatures = + { packageId + , features ? rootFeatures + , crateOverrides ?
defaultCrateOverrides + , buildRustCrateForPkgsFunc ? null + , runTests ? false + , testCrateFlags ? [ ] + , testInputs ? [ ] + # Any command to run immediately before a test is executed. + , testPreRun ? "" + # Any command to run immediately after a test is executed. + , testPostRun ? "" + }: + lib.makeOverridable + ( + { features + , crateOverrides + , runTests + , testCrateFlags + , testInputs + , testPreRun + , testPostRun + }: + let + buildRustCrateForPkgsFuncOverriden = + if buildRustCrateForPkgsFunc != null + then buildRustCrateForPkgsFunc + else + ( + if crateOverrides == pkgs.defaultCrateOverrides + then buildRustCrateForPkgs + else + pkgs: (buildRustCrateForPkgs pkgs).override { + defaultCrateOverrides = crateOverrides; + } + ); + builtRustCrates = builtRustCratesWithFeatures { + inherit packageId features; + buildRustCrateForPkgsFunc = buildRustCrateForPkgsFuncOverriden; + runTests = false; + }; + builtTestRustCrates = builtRustCratesWithFeatures { + inherit packageId features; + buildRustCrateForPkgsFunc = buildRustCrateForPkgsFuncOverriden; + runTests = true; + }; + drv = builtRustCrates.crates.${packageId}; + testDrv = builtTestRustCrates.crates.${packageId}; + derivation = + if runTests then + crateWithTest + { + crate = drv; + testCrate = testDrv; + inherit testCrateFlags testInputs testPreRun testPostRun; + } + else drv; + in + derivation + ) + { inherit features crateOverrides runTests testCrateFlags testInputs testPreRun testPostRun; }; + + /* Returns an attr set with packageId mapped to the result of buildRustCrateForPkgsFunc + for the corresponding crate. + */ + builtRustCratesWithFeatures = + { packageId + , features + , crateConfigs ? crates + , buildRustCrateForPkgsFunc + , runTests + , makeTarget ? makeDefaultTarget + } @ args: + assert (builtins.isAttrs crateConfigs); + assert (builtins.isString packageId); + assert (builtins.isList features); + assert (builtins.isAttrs (makeTarget stdenv.hostPlatform)); + assert (builtins.isBool runTests); + let + rootPackageId = packageId; + mergedFeatures = mergePackageFeatures + ( + args // { + inherit rootPackageId; + target = makeTarget stdenv.hostPlatform // { test = runTests; }; + } + ); + # Memoize built packages so that reappearing packages are only built once.
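+ # (Editorial note, illustrative; not part of the generated file:) `mkBuiltByPackageIdByPkgs` below builds, per package set, a record of roughly the shape + # { crates = { "<packageId>" = <derivation>; ... }; target = makeTarget pkgs.stdenv.hostPlatform; build = <the same record, built against pkgs.buildPackages>; } + # so each crate derivation is constructed at most once per package set, and proc-macro / build-script dependencies are taken from the `build` copy.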
+ builtByPackageIdByPkgs = mkBuiltByPackageIdByPkgs pkgs; + mkBuiltByPackageIdByPkgs = pkgs: + let + self = { + crates = lib.mapAttrs (packageId: value: buildByPackageIdForPkgsImpl self pkgs packageId) crateConfigs; + target = makeTarget pkgs.stdenv.hostPlatform; + build = mkBuiltByPackageIdByPkgs pkgs.buildPackages; + }; + in + self; + buildByPackageIdForPkgsImpl = self: pkgs: packageId: + let + features = mergedFeatures."${packageId}" or [ ]; + crateConfig' = crateConfigs."${packageId}"; + crateConfig = + builtins.removeAttrs crateConfig' [ "resolvedDefaultFeatures" "devDependencies" ]; + devDependencies = + lib.optionals + (runTests && packageId == rootPackageId) + (crateConfig'.devDependencies or [ ]); + dependencies = + dependencyDerivations { + inherit features; + inherit (self) target; + buildByPackageId = depPackageId: + # proc_macro crates must be compiled for the build architecture + if crateConfigs.${depPackageId}.procMacro or false + then self.build.crates.${depPackageId} + else self.crates.${depPackageId}; + dependencies = + (crateConfig.dependencies or [ ]) + ++ devDependencies; + }; + buildDependencies = + dependencyDerivations { + inherit features; + inherit (self.build) target; + buildByPackageId = depPackageId: + self.build.crates.${depPackageId}; + dependencies = crateConfig.buildDependencies or [ ]; + }; + dependenciesWithRenames = + let + buildDeps = filterEnabledDependencies { + inherit features; + inherit (self) target; + dependencies = crateConfig.dependencies or [ ] ++ devDependencies; + }; + hostDeps = filterEnabledDependencies { + inherit features; + inherit (self.build) target; + dependencies = crateConfig.buildDependencies or [ ]; + }; + in + lib.filter (d: d ? "rename") (hostDeps ++ buildDeps); + # Crate renames have the form: + # + # { + # crate_name = [ + # { version = "1.2.3"; rename = "crate_name01"; } + # ]; + # # ... + # } + crateRenames = + let + grouped = + lib.groupBy + (dependency: dependency.name) + dependenciesWithRenames; + versionAndRename = dep: + let + package = crateConfigs."${dep.packageId}"; + in + { inherit (dep) rename; version = package.version; }; + in + lib.mapAttrs (name: choices: builtins.map versionAndRename choices) grouped; + in + buildRustCrateForPkgsFunc pkgs + ( + crateConfig // { + src = crateConfig.src or ( + pkgs.fetchurl rec { + name = "${crateConfig.crateName}-${crateConfig.version}.tar.gz"; + # https://www.pietroalbini.org/blog/downloading-crates-io/ + # Not rate-limited, CDN URL. + url = "https://static.crates.io/crates/${crateConfig.crateName}/${crateConfig.crateName}-${crateConfig.version}.crate"; + sha256 = + assert (lib.assertMsg (crateConfig ? sha256) "Missing sha256 for ${name}"); + crateConfig.sha256; + } + ); + extraRustcOpts = lib.lists.optional (targetFeatures != [ ]) "-C target-feature=${lib.concatMapStringsSep "," (x: "+${x}") targetFeatures}"; + inherit features dependencies buildDependencies crateRenames release; + } + ); + in + builtByPackageIdByPkgs; + + /* Returns the actual derivations for the given dependencies. 
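     For orientation, a call shaped roughly like the following (a hand-written
     illustration, not present in the generated file) resolves every enabled
     dependency to its memoised crate derivation:

         dependencyDerivations {
           buildByPackageId = id: builtByPackageIdByPkgs.crates.${id};
           features = [ "default" ];
           dependencies = crateConfig.dependencies or [ ];
           target = makeDefaultTarget stdenv.hostPlatform;
         }

     Dependencies whose target expression or optional-feature guard does not
     match are dropped by filterEnabledDependencies first.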
*/ + dependencyDerivations = + { buildByPackageId + , features + , dependencies + , target + }: + assert (builtins.isList features); + assert (builtins.isList dependencies); + assert (builtins.isAttrs target); + let + enabledDependencies = filterEnabledDependencies { + inherit dependencies features target; + }; + depDerivation = dependency: buildByPackageId dependency.packageId; + in + map depDerivation enabledDependencies; + + /* Returns a sanitized version of val with all values substituted that cannot + be serialized as JSON. + */ + sanitizeForJson = val: + if builtins.isAttrs val + then lib.mapAttrs (n: v: sanitizeForJson v) val + else if builtins.isList val + then builtins.map sanitizeForJson val + else if builtins.isFunction val + then "function" + else val; + + /* Returns various tools to debug a crate. */ + debugCrate = { packageId, target ? makeDefaultTarget stdenv.hostPlatform }: + assert (builtins.isString packageId); + let + debug = rec { + # The built tree as passed to buildRustCrate. + buildTree = buildRustCrateWithFeatures { + buildRustCrateForPkgsFunc = _: lib.id; + inherit packageId; + }; + sanitizedBuildTree = sanitizeForJson buildTree; + dependencyTree = sanitizeForJson + ( + buildRustCrateWithFeatures { + buildRustCrateForPkgsFunc = _: crate: { + "01_crateName" = crate.crateName or false; + "02_features" = crate.features or [ ]; + "03_dependencies" = crate.dependencies or [ ]; + }; + inherit packageId; + } + ); + mergedPackageFeatures = mergePackageFeatures { + features = rootFeatures; + inherit packageId target; + }; + diffedDefaultPackageFeatures = diffDefaultPackageFeatures { + inherit packageId target; + }; + }; + in + { internal = debug; }; + + /* Returns differences between cargo default features and crate2nix default + features. + + This is useful for verifying the feature resolution in crate2nix. + */ + diffDefaultPackageFeatures = + { crateConfigs ? crates + , packageId + , target + }: + assert (builtins.isAttrs crateConfigs); + let + prefixValues = prefix: lib.mapAttrs (n: v: { "${prefix}" = v; }); + mergedFeatures = + prefixValues + "crate2nix" + (mergePackageFeatures { inherit crateConfigs packageId target; features = [ "default" ]; }); + configs = prefixValues "cargo" crateConfigs; + combined = lib.foldAttrs (a: b: a // b) { } [ mergedFeatures configs ]; + onlyInCargo = + builtins.attrNames + (lib.filterAttrs (n: v: !(v ? "crate2nix") && (v ? "cargo")) combined); + onlyInCrate2Nix = + builtins.attrNames + (lib.filterAttrs (n: v: (v ? "crate2nix") && !(v ? "cargo")) combined); + differentFeatures = lib.filterAttrs + ( + n: v: + (v ? "crate2nix") + && (v ? "cargo") + && (v.crate2nix.features or [ ]) != (v."cargo".resolved_default_features or [ ]) + ) + combined; + in + builtins.toJSON { + inherit onlyInCargo onlyInCrate2Nix differentFeatures; + }; + + /* Returns an attrset mapping packageId to the list of enabled features. + + If multiple paths to a dependency enable different features, the + corresponding feature sets are merged. Features in rust are additive. + */ + mergePackageFeatures = + { crateConfigs ? crates + , packageId + , rootPackageId ? packageId + , features ? rootFeatures + , dependencyPath ? [ crates.${packageId}.crateName ] + , featuresByPackageId ? { } + , target + # Adds devDependencies to the crate with rootPackageId. + , runTests ? false + , ... 
+ } @ args: + assert (builtins.isAttrs crateConfigs); + assert (builtins.isString packageId); + assert (builtins.isString rootPackageId); + assert (builtins.isList features); + assert (builtins.isList dependencyPath); + assert (builtins.isAttrs featuresByPackageId); + assert (builtins.isAttrs target); + assert (builtins.isBool runTests); + let + crateConfig = crateConfigs."${packageId}" or (builtins.throw "Package not found: ${packageId}"); + expandedFeatures = expandFeatures (crateConfig.features or { }) features; + enabledFeatures = enableFeatures (crateConfig.dependencies or [ ]) expandedFeatures; + depWithResolvedFeatures = dependency: + let + packageId = dependency.packageId; + features = dependencyFeatures enabledFeatures dependency; + in + { inherit packageId features; }; + resolveDependencies = cache: path: dependencies: + assert (builtins.isAttrs cache); + assert (builtins.isList dependencies); + let + enabledDependencies = filterEnabledDependencies { + inherit dependencies target; + features = enabledFeatures; + }; + directDependencies = map depWithResolvedFeatures enabledDependencies; + foldOverCache = op: lib.foldl op cache directDependencies; + in + foldOverCache + ( + cache: { packageId, features }: + let + cacheFeatures = cache.${packageId} or [ ]; + combinedFeatures = sortedUnique (cacheFeatures ++ features); + in + if cache ? ${packageId} && cache.${packageId} == combinedFeatures + then cache + else + mergePackageFeatures { + features = combinedFeatures; + featuresByPackageId = cache; + inherit crateConfigs packageId target runTests rootPackageId; + } + ); + cacheWithSelf = + let + cacheFeatures = featuresByPackageId.${packageId} or [ ]; + combinedFeatures = sortedUnique (cacheFeatures ++ enabledFeatures); + in + featuresByPackageId // { + "${packageId}" = combinedFeatures; + }; + cacheWithDependencies = + resolveDependencies cacheWithSelf "dep" + ( + crateConfig.dependencies or [ ] + ++ lib.optionals + (runTests && packageId == rootPackageId) + (crateConfig.devDependencies or [ ]) + ); + cacheWithAll = + resolveDependencies + cacheWithDependencies "build" + (crateConfig.buildDependencies or [ ]); + in + cacheWithAll; + + /* Returns the enabled dependencies given the enabled features. */ + filterEnabledDependencies = { dependencies, features, target }: + assert (builtins.isList dependencies); + assert (builtins.isList features); + assert (builtins.isAttrs target); + + lib.filter + ( + dep: + let + targetFunc = dep.target or (features: true); + in + targetFunc { inherit features target; } + && ( + !(dep.optional or false) + || builtins.any (doesFeatureEnableDependency dep) features + ) + ) + dependencies; + + /* Returns whether the given feature should enable the given dependency. */ + doesFeatureEnableDependency = dependency: feature: + let + name = dependency.rename or dependency.name; + prefix = "${name}/"; + len = builtins.stringLength prefix; + startsWithPrefix = builtins.substring 0 len feature == prefix; + in + feature == name || feature == "dep:" + name || startsWithPrefix; + + /* Returns the expanded features for the given inputFeatures by applying the + rules in featureMap. + + featureMap is an attribute set which maps feature names to lists of further + feature names to enable in case this feature is selected. 
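     For example (hand-computed from the definition that follows; not part of
     the generated file):

         expandFeatures { default = [ "std" ]; std = [ "alloc" ]; } [ "default" ]
         => [ "alloc" "default" "std" ]

     i.e. feature rules are applied transitively and the result is
     de-duplicated and sorted.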
+ */ + expandFeatures = featureMap: inputFeatures: + assert (builtins.isAttrs featureMap); + assert (builtins.isList inputFeatures); + let + expandFeature = feature: + assert (builtins.isString feature); + [ feature ] ++ (expandFeatures featureMap (featureMap."${feature}" or [ ])); + outFeatures = lib.concatMap expandFeature inputFeatures; + in + sortedUnique outFeatures; + + /* This function adds optional dependencies as features if they are enabled + indirectly by dependency features. This function mimics Cargo's behavior + described in a note at: + https://doc.rust-lang.org/nightly/cargo/reference/features.html#dependency-features + */ + enableFeatures = dependencies: features: + assert (builtins.isList features); + assert (builtins.isList dependencies); + let + additionalFeatures = lib.concatMap + ( + dependency: + assert (builtins.isAttrs dependency); + let + enabled = builtins.any (doesFeatureEnableDependency dependency) features; + in + if (dependency.optional or false) && enabled + then [ (dependency.rename or dependency.name) ] + else [ ] + ) + dependencies; + in + sortedUnique (features ++ additionalFeatures); + + /* + Returns the actual features for the given dependency. + + features: The features of the crate that refers this dependency. + */ + dependencyFeatures = features: dependency: + assert (builtins.isList features); + assert (builtins.isAttrs dependency); + let + defaultOrNil = + if dependency.usesDefaultFeatures or true + then [ "default" ] + else [ ]; + explicitFeatures = dependency.features or [ ]; + additionalDependencyFeatures = + let + dependencyPrefix = (dependency.rename or dependency.name) + "/"; + dependencyFeatures = + builtins.filter (f: lib.hasPrefix dependencyPrefix f) features; + in + builtins.map (lib.removePrefix dependencyPrefix) dependencyFeatures; + in + defaultOrNil ++ explicitFeatures ++ additionalDependencyFeatures; + + /* Sorts and removes duplicates from a list of strings. */ + sortedUnique = features: + assert (builtins.isList features); + assert (builtins.all builtins.isString features); + let + outFeaturesSet = lib.foldl (set: feature: set // { "${feature}" = 1; }) { } features; + outFeaturesUnique = builtins.attrNames outFeaturesSet; + in + builtins.sort (a: b: a < b) outFeaturesUnique; + + deprecationWarning = message: value: + if strictDeprecation + then builtins.throw "strictDeprecation enabled, aborting: ${message}" + else builtins.trace message value; + + # + # crate2nix/default.nix (excerpt end) + # + }; +} + diff --git a/tvix/Cargo.toml b/tvix/Cargo.toml new file mode 100644 index 000000000000..314e329e6f2a --- /dev/null +++ b/tvix/Cargo.toml @@ -0,0 +1,35 @@ +# This Cargo file is a workspace configuration as per +# https://doc.rust-lang.org/book/ch14-03-cargo-workspaces.html +# +# We add this file to get a coherent set of dependencies across Tvix +# crates by sharing a Cargo.lock. This is necessary because of the +# currently limited support for Rust builds in Nix. +# +# Note that this explicitly does *not* mean that //tvix should be +# considered "one project": This is simply a workaround for a +# technical limitation and it should be our aim to remove this +# workspace file and make the subprojects independent. +# +# Note also that CI targets for actual projects should *not* be tied +# to //tvix, but to its subprojects. A potential file at +# //tvix/default.nix should likely *not* expose anything other than +# extra steps or other auxiliary targets. 
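To round off the crate2nix excerpt above before the workspace manifest continues: the two remaining feature helpers compose as in the following hand-computed illustration (the `serde` dependency is made up for the example and does not occur in this change):

    # An optional dependency becomes a feature of its own once something enables it.
    enableFeatures
      [ { name = "serde"; optional = true; } ]
      [ "default" "serde/derive" ]
    => [ "default" "serde" "serde/derive" ]

    # The features forwarded to that dependency are its explicit feature list
    # plus everything written as "serde/<feature>" by the referring crate.
    dependencyFeatures
      [ "default" "serde/derive" ]
      { name = "serde"; optional = true; usesDefaultFeatures = false; features = [ "std" ]; }
    => [ "std" "derive" ]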
+ +[workspace] + +members = [ + "cli", + "eval", + "eval/builtin-macros", + "nix_cli", + "nix-compat", + "serde", + "store", +] + +# Add a profile to all targets that enables release optimisations, but +# retains debug symbols. This is great for use with +# benchmarking/profiling tools. +[profile.release-with-debug] +inherits = "release" +debug = true diff --git a/tvix/LICENSE b/tvix/LICENSE new file mode 100644 index 000000000000..f288702d2fa1 --- /dev/null +++ b/tvix/LICENSE @@ -0,0 +1,674 @@ + GNU GENERAL PUBLIC LICENSE + Version 3, 29 June 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/> + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU General Public License is a free, copyleft license for +software and other kinds of works. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +the GNU General Public License is intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. We, the Free Software Foundation, use the +GNU General Public License for most of our software; it applies also to +any other work released this way by its authors. You can apply it to +your programs, too. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + To protect your rights, we need to prevent others from denying you +these rights or asking you to surrender the rights. Therefore, you have +certain responsibilities if you distribute copies of the software, or if +you modify it: responsibilities to respect the freedom of others. + + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must pass on to the recipients the same +freedoms that you received. You must make sure that they, too, receive +or can get the source code. And you must show them these terms so they +know their rights. + + Developers that use the GNU GPL protect your rights with two steps: +(1) assert copyright on the software, and (2) offer you this License +giving you legal permission to copy, distribute and/or modify it. + + For the developers' and authors' protection, the GPL clearly explains +that there is no warranty for this free software. For both users' and +authors' sake, the GPL requires that modified versions be marked as +changed, so that their problems will not be attributed erroneously to +authors of previous versions. + + Some devices are designed to deny users access to install or run +modified versions of the software inside them, although the manufacturer +can do so. This is fundamentally incompatible with the aim of +protecting users' freedom to change the software. The systematic +pattern of such abuse occurs in the area of products for individuals to +use, which is precisely where it is most unacceptable. Therefore, we +have designed this version of the GPL to prohibit the practice for those +products. 
If such problems arise substantially in other domains, we +stand ready to extend this provision to those domains in future versions +of the GPL, as needed to protect the freedom of users. + + Finally, every program is threatened constantly by software patents. +States should not allow patents to restrict development and use of +software on general-purpose computers, but in those that do, we wish to +avoid the special danger that patents applied to a free program could +make it effectively proprietary. To prevent this, the GPL assures that +patents cannot be used to render the program non-free. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. + + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. 
A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. + + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. 
+ + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. + + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. 
+ + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. + + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. 
+ + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. 
+ + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. + + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. 
If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). 
To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Use with the GNU Affero General Public License. 
+ + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU Affero General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the special requirements of the GNU Affero General Public License, +section 13, concerning interaction through a network will apply to the +combination as such. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. + + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. 
+ + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +state the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. + + <one line to give the program's name and a brief idea of what it does.> + Copyright (C) <year> <name of author> + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program. If not, see <https://www.gnu.org/licenses/>. + +Also add information on how to contact you by electronic and paper mail. + + If the program does terminal interaction, make it output a short +notice like this when it starts in an interactive mode: + + <program> Copyright (C) <year> <name of author> + This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. + This is free software, and you are welcome to redistribute it + under certain conditions; type `show c' for details. + +The hypothetical commands `show w' and `show c' should show the appropriate +parts of the General Public License. Of course, your program's commands +might be different; for a GUI interface, you would use an "about box". + + You should also get your employer (if you work as a programmer) or school, +if any, to sign a "copyright disclaimer" for the program, if necessary. +For more information on this, and how to apply and follow the GNU GPL, see +<https://www.gnu.org/licenses/>. + + The GNU General Public License does not permit incorporating your program +into proprietary programs. If your program is a subroutine library, you +may consider it more useful to permit linking proprietary applications with +the library. If this is what you want to do, use the GNU Lesser General +Public License instead of this License. But first, please read +<https://www.gnu.org/licenses/why-not-lgpl.html>. diff --git a/tvix/OWNERS b/tvix/OWNERS new file mode 100644 index 000000000000..ab6d4f44ad13 --- /dev/null +++ b/tvix/OWNERS @@ -0,0 +1,7 @@ +set noparent + +adisbladis +flokli +grfn +sterni +tazjin diff --git a/tvix/README.md b/tvix/README.md new file mode 100644 index 000000000000..02f3b480ba4e --- /dev/null +++ b/tvix/README.md @@ -0,0 +1,104 @@ +<div align="center"> + <img src="https://tvix.dev/tvix-logo.webp"> +</div> + +----------------- + +Tvix is a new implementation of the Nix language and package manager. See the +[announcement post][post-1] for information about the background of this +project. + +Tvix is developed by [TVL][tvl] in our monorepo, the `depot`, at +[//tvix][tvix-src]. Code reviews take place on [Gerrit][tvix-gerrit], bugs are +filed in [our issue tracker][b]. + +For more information about Tvix, feel free to reach out. 
We are interested in +people who would like to help us review designs, brainstorm and describe +requirements that we may not yet have considered. + +Most of the discussion around development happens on our IRC channel, which you +can join in several ways documented on [tvl.fyi][getting-in-touch], or on our +[mailing list][]. + +Contributions to Tvix follow the TVL [review flow][review-docs] and +[contribution guidelines][contributing]. + +[post-1]: https://tvl.fyi/blog/rewriting-nix +[tvl]: https://tvl.fyi +[tvix-src]: https://cs.tvl.fyi/depot/-/tree/tvix/ +[tvix-gerrit]: https://cl.tvl.fyi/q/path:%255Etvix.* +[b]: https://b.tvl.fyi +[getting-in-touch]: https://tvl.fyi/#getting-in-touch +[mailing list]: https://inbox.tvl.su +[review-docs]: https://code.tvl.fyi/about/docs/REVIEWS.md +[contributing]: https://code.tvl.fyi/about/docs/CONTRIBUTING.md + +WARNING: Tvix is not ready for use in production. None of our current APIs +should be considered stable in any way. + +WARNING: Any other instances of this project or repository are +[`josh`-mirrors][josh]. We do not accept code contributions or issues outside of +the tooling and communication methods outlined above. + +[josh]: https://github.com/josh-project/josh + +## Components + +This folder contains the following components: + +* `//tvix/eval` - an implementation of the Nix programming language +* `//tvix/nix-compat` - library functions for compatibility with C++ Nix +* `//tvix/cli` - preliminary REPL & CLI implementation for Tvix +* `//tvix/serde` - Rust library for using the Nix language for app configuration +* `//tvix/store` - implementation of a file store for Tvix + +Some additional folders with auxiliary things exist and can be explored at your +leisure. + +## Building the CLI + +The CLI can also be built with standard Rust tooling (i.e. `cargo build`), +as long as you are in a shell with the right dependencies. + + - If you cloned the full monorepo, it can be provided by + `mg shell //tvix:shell`. + - If you cloned the `tvix` workspace only + (`git clone https://code.tvl.fyi/depot.git:workspace=views/tvix.git`), + `nix-shell` provides it. + +If you're in the TVL monorepo, you can also run `mg build //tvix/cli` +(or `mg build` from inside that folder) for a more incremental build. + +Please follow the depot-wide instructions on how to get `mg` and use the depot +tooling. + +### Compatibility +**Important note:** We only use and test Nix builds of our software +against Nix 2.3. There are a variety of bugs and subtle problems in +newer Nix versions which we do not have the bandwidth to address, +builds in newer Nix versions may or may not work. + +## Rust projects, crate2nix + +Some parts of Tvix are written in Rust. To simplify the dependency +management on the Nix side of these builds, we use `crate2nix` in a +single Rust workspace in `//tvix` to maintain the Nix build +configuration. + +When making changes to Cargo dependency configuration in any of the +Rust projects under `//tvix`, be sure to run +`mg run //tvix:crate2nixGenerate --` in `//tvix` itself and commit the changes +to the generated `Cargo.nix` file. This only applies to the full TVL checkout. + +## License structure + +All code implemented for Tvix is licensed under the GPL-3.0, with the +exception of the protocol buffer definitions used for communication +between services which are available under a more permissive license +(MIT). + +The idea behind this structure is that any direct usage of our code +(e.g. linking to it, embedding the evaluator, etc.) 
will fall under +the terms of the GPL3, but users are free to implement their own +components speaking these protocols under the terms of the MIT +license. diff --git a/tvix/cli/Cargo.toml b/tvix/cli/Cargo.toml new file mode 100644 index 000000000000..676da02a07a2 --- /dev/null +++ b/tvix/cli/Cargo.toml @@ -0,0 +1,23 @@ +[package] +name = "tvix-cli" +version = "0.1.0" +edition = "2021" + +[[bin]] +name = "tvix" +path = "src/main.rs" + +[dependencies] +nix-compat = { path = "../nix-compat" } +tvix-store = { path = "../store" } +tvix-eval = { path = "../eval" } +rustyline = "10.0.0" +clap = { version = "4.0", features = ["derive", "env"] } +dirs = "4.0.0" +smol_str = "0.2.0" +ssri = "7.0.0" +data-encoding = "2.3.3" +thiserror = "1.0.38" + +[dependencies.wu-manber] +git = "https://github.com/tvlfyi/wu-manber.git" diff --git a/tvix/cli/default.nix b/tvix/cli/default.nix new file mode 100644 index 000000000000..a4bb19701f01 --- /dev/null +++ b/tvix/cli/default.nix @@ -0,0 +1,36 @@ +{ depot, pkgs, lib, ... }: + +let + mkNixpkgsEvalCheck = attrset: expectedPath: { + label = ":nix: evaluate nixpkgs.${attrset} in tvix"; + needsOutput = true; + + command = pkgs.writeShellScript "tvix-eval-${builtins.replaceStrings [".drv"] ["-drv"] attrset}" '' + TVIX_OUTPUT=$(result/bin/tvix -E '(import ${pkgs.path} {}).${attrset}') + EXPECTED='${/* the verbatim expected Tvix output: */ "=> \"${expectedPath}\" :: string"}' + + echo "Tvix output: ''${TVIX_OUTPUT}" + if [ "$TVIX_OUTPUT" != "$EXPECTED" ]; then + echo "Correct would have been ''${EXPECTED}" + exit 1 + fi + + echo "Output was correct." + ''; + }; +in + +(depot.tvix.crates.workspaceMembers.tvix-cli.build.override { + runTests = true; +}).overrideAttrs (_: { + meta = { + ci.extraSteps = { + eval-nixpkgs-stdenv-drvpath = (mkNixpkgsEvalCheck "stdenv.drvPath" pkgs.stdenv.drvPath); + eval-nixpkgs-stdenv-outpath = (mkNixpkgsEvalCheck "stdenv.outPath" pkgs.stdenv.outPath); + eval-nixpkgs-hello-outpath = (mkNixpkgsEvalCheck "hello.outPath" pkgs.hello.outPath); + + # This is the furthest we get starting with stdenv we hit something similar to b/261 + eval-nixpkgs-cross-gcc-outpath = (mkNixpkgsEvalCheck "pkgsCross.aarch64-multiplatform.buildPackages.gcc.outPath" pkgs.pkgsCross.aarch64-multiplatform.buildPackages.gcc.outPath); + }; + }; +}) diff --git a/tvix/cli/src/.skip-subtree b/tvix/cli/src/.skip-subtree new file mode 100644 index 000000000000..a16a2afe1f1e --- /dev/null +++ b/tvix/cli/src/.skip-subtree @@ -0,0 +1 @@ +Because of the derivation.nix file ... diff --git a/tvix/cli/src/derivation.nix b/tvix/cli/src/derivation.nix new file mode 100644 index 000000000000..9355cc3a96f0 --- /dev/null +++ b/tvix/cli/src/derivation.nix @@ -0,0 +1,36 @@ +# LGPL-2.1-or-later +# +# taken from: https://github.com/NixOS/nix/blob/master/src/libexpr/primops/derivation.nix +# +# TODO: rewrite in native Rust code + +/* This is the implementation of the ‘derivation’ builtin function. + It's actually a wrapper around the ‘derivationStrict’ primop. */ + +drvAttrs @ { outputs ? [ "out" ], ... 
}: + +let + + strict = derivationStrict drvAttrs; + + commonAttrs = drvAttrs // (builtins.listToAttrs outputsList) // + { + all = map (x: x.value) outputsList; + inherit drvAttrs; + }; + + outputToAttrListElement = outputName: + { + name = outputName; + value = commonAttrs // { + outPath = builtins.getAttr outputName strict; + drvPath = strict.drvPath; + type = "derivation"; + inherit outputName; + }; + }; + + outputsList = map outputToAttrListElement outputs; + +in +(builtins.head outputsList).value diff --git a/tvix/cli/src/derivation.rs b/tvix/cli/src/derivation.rs new file mode 100644 index 000000000000..cf15ebbb0dc3 --- /dev/null +++ b/tvix/cli/src/derivation.rs @@ -0,0 +1,697 @@ +//! Implements `builtins.derivation`, the core of what makes Nix build packages. +use nix_compat::derivation::Derivation; +use nix_compat::nixhash; +use std::cell::RefCell; +use std::collections::{btree_map, BTreeSet}; +use std::rc::Rc; +use tvix_eval::builtin_macros::builtins; +use tvix_eval::generators::{self, GenCo}; +use tvix_eval::{AddContext, CoercionKind, ErrorKind, NixAttrs, NixList, Value}; + +use crate::errors::Error; +use crate::known_paths::{KnownPaths, PathKind, PathName}; + +// Constants used for strangely named fields in derivation inputs. +const STRUCTURED_ATTRS: &str = "__structuredAttrs"; +const IGNORE_NULLS: &str = "__ignoreNulls"; + +/// Helper function for populating the `drv.outputs` field from a +/// manually specified set of outputs, instead of the default +/// `outputs`. +async fn populate_outputs( + co: &GenCo, + drv: &mut Derivation, + outputs: NixList, +) -> Result<(), ErrorKind> { + // Remove the original default `out` output. + drv.outputs.clear(); + + for output in outputs { + let output_name = generators::request_force(co, output) + .await + .to_str() + .context("determining output name")?; + + if drv + .outputs + .insert(output_name.as_str().into(), Default::default()) + .is_some() + { + return Err(Error::DuplicateOutput(output_name.as_str().into()).into()); + } + } + + Ok(()) +} + +/// Populate the inputs of a derivation from the build references +/// found when scanning the derivation's parameters. +fn populate_inputs<I: IntoIterator<Item = PathName>>( + drv: &mut Derivation, + known_paths: &KnownPaths, + references: I, +) { + for reference in references.into_iter() { + let reference = &known_paths[&reference]; + match &reference.kind { + PathKind::Plain => { + drv.input_sources.insert(reference.path.clone()); + } + + PathKind::Output { name, derivation } => { + match drv.input_derivations.entry(derivation.clone()) { + btree_map::Entry::Vacant(entry) => { + entry.insert(BTreeSet::from([name.clone()])); + } + + btree_map::Entry::Occupied(mut entry) => { + entry.get_mut().insert(name.clone()); + } + } + } + + PathKind::Derivation { output_names } => { + match drv.input_derivations.entry(reference.path.clone()) { + btree_map::Entry::Vacant(entry) => { + entry.insert(output_names.clone()); + } + + btree_map::Entry::Occupied(mut entry) => { + entry.get_mut().extend(output_names.clone().into_iter()); + } + } + } + } + } +} + +/// Populate the output configuration of a derivation based on the +/// parameters passed to the call, flipping the required +/// parameters for a fixed-output derivation if necessary. +/// +/// This function handles all possible combinations of the +/// parameters, including invalid ones. 
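/// (Editorial illustration, not part of the original source: on the Nix side, a
/// fixed-output derivation exercising these parameters could be written as
///
///     derivation {
///       name = "empty";
///       system = builtins.currentSystem;
///       builder = "/bin/sh";
///       args = [ "-c" ": > $out" ];
///       outputHash = "sha256-47DEQpj8HBSa+/TImW+5JCeuQeRkm5NMpJWZG3hSuFU=";  # SRI form
///       outputHashAlgo = "";      # empty: the algorithm is taken from the SRI string
///       outputHashMode = "flat";
///     }
///
/// where the hash is the SHA-256 of the empty file the builder produces.)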
+/// +/// Due to the support for SRI hashes, and how these are passed along to +/// builtins.derivation, outputHash and outputHashAlgo can have values which +/// need to be further modified before constructing the Derivation struct. +/// +/// If outputHashAlgo is an SRI hash, outputHashAlgo must either be an empty +/// string, or the hash algorithm as specified in the (single) SRI (entry). +/// SRI strings with multiple hash algorithms are not supported. +/// +/// In case an SRI string was used, the (single) fixed output is populated +/// with the hash algo name, and the hash digest is populated with the +/// (lowercase) hex encoding of the digest. +/// +/// These values are only rewritten for the outputs, not what's passed to env. +fn populate_output_configuration( + drv: &mut Derivation, + hash: Option<String>, // in nix: outputHash + hash_algo: Option<String>, // in nix: outputHashAlgo + hash_mode: Option<String>, // in nix: outputHashmode +) -> Result<(), ErrorKind> { + // We only do something when `digest` and `algo` are `Some(_)``, and + // there's an `out` output. + if let (Some(hash), Some(algo), hash_mode) = (hash, hash_algo, hash_mode) { + match drv.outputs.get_mut("out") { + None => return Err(Error::ConflictingOutputTypes.into()), + Some(out) => { + // treat an empty algo as None + let a = if algo.is_empty() { + None + } else { + Some(algo.as_ref()) + }; + + let output_hash = nixhash::from_str(&hash, a).map_err(Error::InvalidOutputHash)?; + + // construct the NixHashWithMode. + out.hash_with_mode = match hash_mode.as_deref() { + None | Some("flat") => Some(nixhash::NixHashWithMode::Flat( + nixhash::NixHash::new(output_hash.algo, output_hash.digest), + )), + Some("recursive") => Some(nixhash::NixHashWithMode::Recursive( + nixhash::NixHash::new(output_hash.algo, output_hash.digest), + )), + Some(other) => { + return Err(Error::InvalidOutputHashMode(other.to_string()).into()) + } + } + } + } + } + + Ok(()) +} + +/// Handles derivation parameters which are not just forwarded to +/// the environment. The return value indicates whether the +/// parameter should be included in the environment. +async fn handle_derivation_parameters( + drv: &mut Derivation, + co: &GenCo, + name: &str, + value: &Value, + val_str: &str, +) -> Result<bool, ErrorKind> { + match name { + IGNORE_NULLS => return Ok(false), + + // Command line arguments to the builder. + "args" => { + let args = value.to_list()?; + for arg in args { + drv.arguments.push(strong_coerce_to_string(co, arg).await?); + } + + // The arguments do not appear in the environment. 
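To make the SRI normalisation described in the comment above concrete, here is a hedged Nix-level sketch (the name and builder are made up; the hash value is simply reused from the commented-out tests later in this file): an SRI `outputHash` combined with an empty `outputHashAlgo`, which this code rewrites into the algorithm name plus the lowercase hex digest on the single `out` output, while the environment keeps the values exactly as passed in.

```nix
derivation {
  name = "fixed-output-example";   # illustrative name
  system = "x86_64-linux";
  builder = "/bin/sh";             # illustrative builder

  # SRI-style hash with an empty outputHashAlgo: an accepted combination,
  # normalised to sha256 + lowercase hex digest in the resulting .drv.
  outputHash = "sha256-swapHA/ZO8QoDPwumMt6s5gf91oYe+oyk4EfRSyJqMg=";
  outputHashAlgo = "";
  outputHashMode = "flat";
}
```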
+ return Ok(false); + } + + // Explicitly specified drv outputs (instead of default [ "out" ]) + "outputs" => { + let outputs = value + .to_list() + .context("looking at the `outputs` parameter of the derivation")?; + + populate_outputs(co, drv, outputs).await?; + } + + "builder" => { + drv.builder = val_str.to_string(); + } + + "system" => { + drv.system = val_str.to_string(); + } + + _ => {} + } + + Ok(true) +} + +async fn strong_coerce_to_string(co: &GenCo, val: Value) -> Result<String, ErrorKind> { + let val = generators::request_force(co, val).await; + let val_str = generators::request_string_coerce(co, val, CoercionKind::Strong).await; + + Ok(val_str.as_str().to_string()) +} + +#[builtins(state = "Rc<RefCell<KnownPaths>>")] +mod derivation_builtins { + use super::*; + use nix_compat::store_path::hash_placeholder; + use tvix_eval::generators::Gen; + + #[builtin("placeholder")] + async fn builtin_placeholder(co: GenCo, input: Value) -> Result<Value, ErrorKind> { + let placeholder = hash_placeholder( + input + .to_str() + .context("looking at output name in builtins.placeholder")? + .as_str(), + ); + + Ok(placeholder.into()) + } + + /// Strictly construct a Nix derivation from the supplied arguments. + /// + /// This is considered an internal function, users usually want to + /// use the higher-level `builtins.derivation` instead. + #[builtin("derivationStrict")] + async fn builtin_derivation_strict( + state: Rc<RefCell<KnownPaths>>, + co: GenCo, + input: Value, + ) -> Result<Value, ErrorKind> { + let input = input.to_attrs()?; + let name = generators::request_force(&co, input.select_required("name")?.clone()) + .await + .to_str() + .context("determining derivation name")?; + + // Check whether attributes should be passed as a JSON file. + // TODO: the JSON serialisation has to happen here. + if let Some(sa) = input.select(STRUCTURED_ATTRS) { + if generators::request_force(&co, sa.clone()).await.as_bool()? { + return Err(ErrorKind::NotImplemented(STRUCTURED_ATTRS)); + } + } + + // Check whether null attributes should be ignored or passed through. + let ignore_nulls = match input.select(IGNORE_NULLS) { + Some(b) => generators::request_force(&co, b.clone()).await.as_bool()?, + None => false, + }; + + let mut drv = Derivation::default(); + drv.outputs.insert("out".to_string(), Default::default()); + + // Configure fixed-output derivations if required. + + async fn select_string( + co: &GenCo, + attrs: &NixAttrs, + key: &str, + ) -> Result<Option<String>, ErrorKind> { + if let Some(attr) = attrs.select(key) { + return Ok(Some(strong_coerce_to_string(co, attr.clone()).await?)); + } + + Ok(None) + } + + for (name, value) in input.clone().into_iter_sorted() { + let value = generators::request_force(&co, value).await; + if ignore_nulls && matches!(value, Value::Null) { + continue; + } + + let val_str = strong_coerce_to_string(&co, value.clone()).await?; + + // handle_derivation_parameters tells us whether the + // argument should be added to the environment; continue + // to the next one otherwise + if !handle_derivation_parameters(&mut drv, &co, name.as_str(), &value, &val_str).await? + { + continue; + } + + // Most of these are also added to the builder's environment in "raw" form. 
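As a hedged Nix-level illustration of the `__ignoreNulls` handling above (attribute names are invented for the example): when the flag is set, attributes whose forced value is `null` are skipped entirely, so they are neither coerced to strings nor added to the builder environment.

```nix
derivation {
  name = "ignore-nulls-example";
  system = "x86_64-linux";
  builder = "/bin/sh";

  __ignoreNulls = true;
  optionalFlag = null;   # dropped: never coerced, absent from the environment
  realFlag = "yes";      # coerced to a string and exported to the builder
}
```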
+ if drv + .environment + .insert(name.as_str().to_string(), val_str) + .is_some() + { + return Err(Error::DuplicateEnvVar(name.as_str().to_string()).into()); + } + } + + populate_output_configuration( + &mut drv, + select_string(&co, &input, "outputHash") + .await + .context("evaluating the `outputHash` parameter")?, + select_string(&co, &input, "outputHashAlgo") + .await + .context("evaluating the `outputHashAlgo` parameter")?, + select_string(&co, &input, "outputHashMode") + .await + .context("evaluating the `outputHashMode` parameter")?, + )?; + + // Scan references in relevant attributes to detect any build-references. + let references = { + let state = state.borrow(); + if state.is_empty() { + // skip reference scanning, create an empty result + Default::default() + } else { + let mut refscan = state.reference_scanner(); + drv.arguments.iter().for_each(|s| refscan.scan_str(s)); + drv.environment.values().for_each(|s| refscan.scan_str(s)); + refscan.scan_str(&drv.builder); + refscan.finalise() + } + }; + + // Each output name needs to exist in the environment, at this + // point initialised as an empty string because that is the + // way of Golang ;) + for output in drv.outputs.keys() { + if drv + .environment + .insert(output.to_string(), String::new()) + .is_some() + { + return Err(Error::ShadowedOutput(output.to_string()).into()); + } + } + + let mut known_paths = state.borrow_mut(); + populate_inputs(&mut drv, &known_paths, references); + + // At this point, derivation fields are fully populated from + // eval data structures. + drv.validate(false).map_err(Error::InvalidDerivation)?; + + // Calculate the derivation_or_fod_hash for the current derivation. + // This one is still intermediate (so not added to known_paths) + let derivation_or_fod_hash_tmp = + drv.derivation_or_fod_hash(|drv| known_paths.get_hash_derivation_modulo(drv)); + + // Mutate the Derivation struct and set output paths + drv.calculate_output_paths(&name, &derivation_or_fod_hash_tmp) + .map_err(Error::InvalidDerivation)?; + + let derivation_path = drv + .calculate_derivation_path(&name) + .map_err(Error::InvalidDerivation)?; + + // recompute the hash derivation modulo and add to known_paths + let derivation_or_fod_hash_final = + drv.derivation_or_fod_hash(|drv| known_paths.get_hash_derivation_modulo(drv)); + + known_paths.add_hash_derivation_modulo( + derivation_path.to_absolute_path(), + &derivation_or_fod_hash_final, + ); + + // mark all the new paths as known + let output_names: Vec<String> = drv.outputs.keys().map(Clone::clone).collect(); + known_paths.drv(derivation_path.to_absolute_path(), &output_names); + + for (output_name, output) in &drv.outputs { + known_paths.output( + &output.path, + output_name, + derivation_path.to_absolute_path(), + ); + } + + let mut new_attrs: Vec<(String, String)> = drv + .outputs + .into_iter() + .map(|(name, output)| (name, output.path)) + .collect(); + + new_attrs.push(("drvPath".to_string(), derivation_path.to_absolute_path())); + + Ok(Value::Attrs(Box::new(NixAttrs::from_iter( + new_attrs.into_iter(), + )))) + } + + #[builtin("toFile")] + async fn builtin_to_file( + state: Rc<RefCell<KnownPaths>>, + co: GenCo, + name: Value, + content: Value, + ) -> Result<Value, ErrorKind> { + let name = name + .to_str() + .context("evaluating the `name` parameter of builtins.toFile")?; + let content = content + .to_str() + .context("evaluating the `content` parameter of builtins.toFile")?; + + let mut refscan = state.borrow().reference_scanner(); + refscan.scan_str(content.as_str()); + 
let refs = { + let paths = state.borrow(); + refscan + .finalise() + .into_iter() + .map(|path| paths[&path].path.to_string()) + .collect::<Vec<_>>() + }; + + // TODO: fail on derivation references (only "plain" is allowed here) + + let path = nix_compat::store_path::build_text_path(name.as_str(), content.as_str(), refs) + .map_err(|_e| { + nix_compat::derivation::DerivationError::InvalidOutputName( + name.as_str().to_string(), + ) + }) + .map_err(Error::InvalidDerivation)? + .to_absolute_path(); + + state.borrow_mut().plain(&path); + + // TODO: actually persist the file in the store at that path ... + + Ok(Value::String(path.into())) + } +} + +pub use derivation_builtins::builtins as derivation_builtins; + +#[cfg(test)] +mod tests { + use nix_compat::store_path::hash_placeholder; + use tvix_eval::observer::NoOpObserver; + + // TODO: These tests are commented out because we do not have + // scaffolding to drive generators during testing at the moment. + + // static mut OBSERVER: NoOpObserver = NoOpObserver {}; + + // // Creates a fake VM for tests, which can *not* actually be + // // used to force (most) values but can satisfy the type + // // parameter. + // fn fake_vm() -> VM<'static> { + // // safe because accessing the observer doesn't actually do anything + // unsafe { + // VM::new( + // Default::default(), + // Box::new(tvix_eval::DummyIO), + // &mut OBSERVER, + // Default::default(), + // todo!(), + // ) + // } + // } + + // #[test] + // fn populate_outputs_ok() { + // let mut vm = fake_vm(); + // let mut drv = Derivation::default(); + // drv.outputs.insert("out".to_string(), Default::default()); + + // let outputs = NixList::construct( + // 2, + // vec![Value::String("foo".into()), Value::String("bar".into())], + // ); + + // populate_outputs(&mut vm, &mut drv, outputs).expect("populate_outputs should succeed"); + + // assert_eq!(drv.outputs.len(), 2); + // assert!(drv.outputs.contains_key("bar")); + // assert!(drv.outputs.contains_key("foo")); + // } + + // #[test] + // fn populate_outputs_duplicate() { + // let mut vm = fake_vm(); + // let mut drv = Derivation::default(); + // drv.outputs.insert("out".to_string(), Default::default()); + + // let outputs = NixList::construct( + // 2, + // vec![Value::String("foo".into()), Value::String("foo".into())], + // ); + + // populate_outputs(&mut vm, &mut drv, outputs) + // .expect_err("supplying duplicate outputs should fail"); + // } + + // #[test] + // fn populate_inputs_empty() { + // let mut drv = Derivation::default(); + // let paths = KnownPaths::default(); + // let inputs = vec![]; + + // populate_inputs(&mut drv, &paths, inputs); + + // assert!(drv.input_sources.is_empty()); + // assert!(drv.input_derivations.is_empty()); + // } + + // #[test] + // fn populate_inputs_all() { + // let mut drv = Derivation::default(); + + // let mut paths = KnownPaths::default(); + // paths.plain("/nix/store/fn7zvafq26f0c8b17brs7s95s10ibfzs-foo"); + // paths.drv( + // "/nix/store/aqffiyqx602lbam7n1zsaz3yrh6v08pc-bar.drv", + // &["out"], + // ); + // paths.output( + // "/nix/store/zvpskvjwi72fjxg0vzq822sfvq20mq4l-bar", + // "out", + // "/nix/store/aqffiyqx602lbam7n1zsaz3yrh6v08pc-bar.drv", + // ); + + // let inputs = vec![ + // "/nix/store/fn7zvafq26f0c8b17brs7s95s10ibfzs-foo".into(), + // "/nix/store/aqffiyqx602lbam7n1zsaz3yrh6v08pc-bar.drv".into(), + // "/nix/store/zvpskvjwi72fjxg0vzq822sfvq20mq4l-bar".into(), + // ]; + + // populate_inputs(&mut drv, &paths, inputs); + + // assert_eq!(drv.input_sources.len(), 1); + // assert!(drv + // 
.input_sources + // .contains("/nix/store/fn7zvafq26f0c8b17brs7s95s10ibfzs-foo")); + + // assert_eq!(drv.input_derivations.len(), 1); + // assert!(drv + // .input_derivations + // .contains_key("/nix/store/aqffiyqx602lbam7n1zsaz3yrh6v08pc-bar.drv")); + // } + + // #[test] + // fn populate_output_config_std() { + // let mut drv = Derivation::default(); + + // populate_output_configuration(&mut drv, None, None, None) + // .expect("populate_output_configuration() should succeed"); + + // assert_eq!(drv, Derivation::default(), "derivation should be unchanged"); + // } + + // #[test] + // fn populate_output_config_fod() { + // let mut drv = Derivation::default(); + // drv.outputs.insert("out".to_string(), Default::default()); + + // populate_output_configuration( + // &mut drv, + // Some("0000000000000000000000000000000000000000000000000000000000000000".into()), + // Some("sha256".into()), + // None, + // ) + // .expect("populate_output_configuration() should succeed"); + + // let expected = Hash { + // algo: "sha256".into(), + // digest: "0000000000000000000000000000000000000000000000000000000000000000".into(), + // }; + + // assert_eq!(drv.outputs["out"].hash, Some(expected)); + // } + + // #[test] + // fn populate_output_config_fod_recursive() { + // let mut drv = Derivation::default(); + // drv.outputs.insert("out".to_string(), Default::default()); + + // populate_output_configuration( + // &mut drv, + // Some("0000000000000000000000000000000000000000000000000000000000000000".into()), + // Some("sha256".into()), + // Some("recursive".into()), + // ) + // .expect("populate_output_configuration() should succeed"); + + // let expected = Hash { + // algo: "r:sha256".into(), + // digest: "0000000000000000000000000000000000000000000000000000000000000000".into(), + // }; + + // assert_eq!(drv.outputs["out"].hash, Some(expected)); + // } + + // #[test] + // /// hash_algo set to sha256, but SRI hash passed + // fn populate_output_config_flat_sri_sha256() { + // let mut drv = Derivation::default(); + // drv.outputs.insert("out".to_string(), Default::default()); + + // populate_output_configuration( + // &mut drv, + // Some("sha256-swapHA/ZO8QoDPwumMt6s5gf91oYe+oyk4EfRSyJqMg=".into()), + // Some("sha256".into()), + // Some("flat".into()), + // ) + // .expect("populate_output_configuration() should succeed"); + + // let expected = Hash { + // algo: "sha256".into(), + // digest: "b306a91c0fd93bc4280cfc2e98cb7ab3981ff75a187bea3293811f452c89a8c8".into(), // lower hex + // }; + + // assert_eq!(drv.outputs["out"].hash, Some(expected)); + // } + + // #[test] + // /// hash_algo set to empty string, SRI hash passed + // fn populate_output_config_flat_sri() { + // let mut drv = Derivation::default(); + // drv.outputs.insert("out".to_string(), Default::default()); + + // populate_output_configuration( + // &mut drv, + // Some("sha256-s6JN6XqP28g1uYMxaVAQMLiXcDG8tUs7OsE3QPhGqzA=".into()), + // Some("".into()), + // Some("flat".into()), + // ) + // .expect("populate_output_configuration() should succeed"); + + // let expected = Hash { + // algo: "sha256".into(), + // digest: "b3a24de97a8fdbc835b9833169501030b8977031bcb54b3b3ac13740f846ab30".into(), // lower hex + // }; + + // assert_eq!(drv.outputs["out"].hash, Some(expected)); + // } + + // #[test] + // fn handle_outputs_parameter() { + // let mut vm = fake_vm(); + // let mut drv = Derivation::default(); + // drv.outputs.insert("out".to_string(), Default::default()); + + // let outputs = Value::List(NixList::construct( + // 2, + // 
vec![Value::String("foo".into()), Value::String("bar".into())], + // )); + // let outputs_str = outputs + // .coerce_to_string(CoercionKind::Strong, &mut vm) + // .unwrap(); + + // handle_derivation_parameters(&mut drv, &mut vm, "outputs", &outputs, outputs_str.as_str()) + // .expect("handling 'outputs' parameter should succeed"); + + // assert_eq!(drv.outputs.len(), 2); + // assert!(drv.outputs.contains_key("bar")); + // assert!(drv.outputs.contains_key("foo")); + // } + + // #[test] + // fn handle_args_parameter() { + // let mut vm = fake_vm(); + // let mut drv = Derivation::default(); + + // let args = Value::List(NixList::construct( + // 3, + // vec![ + // Value::String("--foo".into()), + // Value::String("42".into()), + // Value::String("--bar".into()), + // ], + // )); + + // let args_str = args + // .coerce_to_string(CoercionKind::Strong, &mut vm) + // .unwrap(); + + // handle_derivation_parameters(&mut drv, &mut vm, "args", &args, args_str.as_str()) + // .expect("handling 'args' parameter should succeed"); + + // assert_eq!( + // drv.arguments, + // vec!["--foo".to_string(), "42".to_string(), "--bar".to_string()] + // ); + // } + + #[test] + fn builtins_placeholder_hashes() { + assert_eq!( + hash_placeholder("out").as_str(), + "/1rz4g4znpzjwh1xymhjpm42vipw92pr73vdgl6xs1hycac8kf2n9" + ); + + assert_eq!( + hash_placeholder("").as_str(), + "/171rf4jhx57xqz3p7swniwkig249cif71pa08p80mgaf0mqz5bmr" + ); + } +} diff --git a/tvix/cli/src/errors.rs b/tvix/cli/src/errors.rs new file mode 100644 index 000000000000..6f328dc0003c --- /dev/null +++ b/tvix/cli/src/errors.rs @@ -0,0 +1,28 @@ +use nix_compat::{derivation::DerivationError, nixhash}; +use std::rc::Rc; +use thiserror::Error; + +/// Errors related to derivation construction +#[derive(Debug, Error)] +pub enum Error { + #[error("an output with the name '{0}' is already defined")] + DuplicateOutput(String), + #[error("fixed-output derivations can only have the default `out`-output")] + ConflictingOutputTypes, + #[error("the environment variable '{0}' has already been set in this derivation")] + DuplicateEnvVar(String), + #[error("the environment variable '{0}' shadows the name of an output")] + ShadowedOutput(String), + #[error("invalid derivation parameters: {0}")] + InvalidDerivation(DerivationError), + #[error("invalid output hash: {0}")] + InvalidOutputHash(nixhash::Error), + #[error("invalid output hash mode: '{0}', only 'recursive' and 'flat` are supported")] + InvalidOutputHashMode(String), +} + +impl From<Error> for tvix_eval::ErrorKind { + fn from(err: Error) -> Self { + tvix_eval::ErrorKind::TvixError(Rc::new(err)) + } +} diff --git a/tvix/cli/src/fetchurl.nix b/tvix/cli/src/fetchurl.nix new file mode 100644 index 000000000000..3f182a5a319b --- /dev/null +++ b/tvix/cli/src/fetchurl.nix @@ -0,0 +1,53 @@ +# SPDX-License-Identifier: LGPL-2.1 +# +# This file is vendored from C++ Nix, as it needs to be bundled with +# an evaluator to be able to evaluate nixpkgs. +# +# Source: https://github.com/NixOS/nix/blob/2.3.16/corepkgs/fetchurl.nix + +{ system ? "" # obsolete +, url +, hash ? "" # an SRI hash + + # Legacy hash specification +, md5 ? "" +, sha1 ? "" +, sha256 ? "" +, sha512 ? "" +, outputHash ? if hash != "" then hash else if sha512 != "" then sha512 else if sha1 != "" then sha1 else if md5 != "" then md5 else sha256 +, outputHashAlgo ? if hash != "" then "" else if sha512 != "" then "sha512" else if sha1 != "" then "sha1" else if md5 != "" then "md5" else "sha256" + +, executable ? false +, unpack ? false +, name ? 
baseNameOf (toString url) +}: + +derivation { + builder = "builtin:fetchurl"; + + # New-style output content requirements. + inherit outputHashAlgo outputHash; + outputHashMode = if unpack || executable then "recursive" else "flat"; + + inherit name url executable unpack; + + system = "builtin"; + + # No need to double the amount of network traffic + preferLocalBuild = true; + + impureEnvVars = [ + # We borrow these environment variables from the caller to allow + # easy proxy configuration. This is impure, but a fixed-output + # derivation like fetchurl is allowed to do so since its result is + # by definition pure. + "http_proxy" + "https_proxy" + "ftp_proxy" + "all_proxy" + "no_proxy" + ]; + + # To make "nix-prefetch-url" work. + urls = [ url ]; +} diff --git a/tvix/cli/src/known_paths.rs b/tvix/cli/src/known_paths.rs new file mode 100644 index 000000000000..07373ef0da7a --- /dev/null +++ b/tvix/cli/src/known_paths.rs @@ -0,0 +1,186 @@ +//! This module implements logic required for persisting known paths +//! during an evaluation. +//! +//! Tvix needs to be able to keep track of each Nix store path that it +//! knows about during the scope of a single evaluation and its +//! related builds. +//! +//! This data is required to scan derivation inputs for the build +//! references (the "build closure") that they make use of. +//! +//! Please see //tvix/eval/docs/build-references.md for more +//! information. + +use crate::refscan::{ReferenceScanner, STORE_PATH_LEN}; +use nix_compat::nixhash::NixHash; +use std::{ + collections::{hash_map, BTreeSet, HashMap}, + ops::Index, +}; + +#[derive(Debug, PartialEq)] +pub enum PathKind { + /// A literal derivation (`.drv`-file), and the *names* of its outputs. + Derivation { output_names: BTreeSet<String> }, + + /// An output of a derivation, its name, and the path of its derivation. + Output { name: String, derivation: String }, + + /// A plain store path (e.g. source files copied to the store). + Plain, +} + +#[derive(Debug, PartialEq)] +pub struct KnownPath { + pub path: String, + pub kind: PathKind, +} + +impl KnownPath { + fn new(path: String, kind: PathKind) -> Self { + KnownPath { path, kind } + } +} + +/// Internal struct to prevent accidental leaks of the truncated path +/// names. +#[repr(transparent)] +#[derive(Clone, Debug, Default, PartialEq, PartialOrd, Ord, Eq, Hash)] +pub struct PathName(String); + +impl From<&str> for PathName { + fn from(s: &str) -> Self { + PathName(s[..STORE_PATH_LEN].to_string()) + } +} + +/// This instance is required to pass PathName instances as needles to +/// the reference scanner. +impl AsRef<[u8]> for PathName { + fn as_ref(&self) -> &[u8] { + self.0.as_ref() + } +} + +#[derive(Debug, Default)] +pub struct KnownPaths { + /// All known paths, keyed by a truncated version of their store + /// path used for reference scanning. + paths: HashMap<PathName, KnownPath>, + + /// All known derivation or FOD hashes. + /// + /// Keys are derivation paths, values is the NixHash. 
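A hedged usage sketch for the vendored `fetchurl.nix` above (the URL is a placeholder, and the SRI hash is reused from commented-out tests elsewhere in this change): with the evaluator's `nix=/__corepkgs__` search-path entry, nixpkgs-style code imports it as `<nix/fetchurl.nix>` and receives a fixed-output derivation driven by the `builtin:fetchurl` builder.

```nix
import <nix/fetchurl.nix> {
  url = "https://example.org/source.tar.gz";                     # placeholder URL
  hash = "sha256-swapHA/ZO8QoDPwumMt6s5gf91oYe+oyk4EfRSyJqMg=";  # placeholder SRI hash
  # name defaults to baseNameOf (toString url), i.e. "source.tar.gz"
}
```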
+ derivation_or_fod_hashes: HashMap<String, NixHash>, +} + +impl Index<&PathName> for KnownPaths { + type Output = KnownPath; + + fn index(&self, index: &PathName) -> &Self::Output { + &self.paths[index] + } +} + +impl KnownPaths { + fn insert_path(&mut self, path: String, path_kind: PathKind) { + match self.paths.entry(path.as_str().into()) { + hash_map::Entry::Vacant(entry) => { + entry.insert(KnownPath::new(path, path_kind)); + } + + hash_map::Entry::Occupied(mut entry) => { + match (path_kind, &mut entry.get_mut().kind) { + // These variant combinations require no "merging action". + (PathKind::Plain, PathKind::Plain) => (), + (PathKind::Output { .. }, PathKind::Output { .. }) => (), + + ( + PathKind::Derivation { output_names: new }, + PathKind::Derivation { + output_names: ref mut old, + }, + ) => { + old.extend(new); + } + + _ => panic!( + "path '{}' inserted twice with different types", + entry.key().0 + ), + }; + } + }; + } + + /// Mark a plain path as known. + pub fn plain<S: ToString>(&mut self, path: S) { + self.insert_path(path.to_string(), PathKind::Plain); + } + + /// Mark a derivation as known. + pub fn drv<P: ToString, O: ToString>(&mut self, path: P, outputs: &[O]) { + self.insert_path( + path.to_string(), + PathKind::Derivation { + output_names: outputs.iter().map(ToString::to_string).collect(), + }, + ); + } + + /// Mark a derivation output path as known. + pub fn output<P: ToString, N: ToString, D: ToString>( + &mut self, + output_path: P, + name: N, + drv_path: D, + ) { + self.insert_path( + output_path.to_string(), + PathKind::Output { + name: name.to_string(), + derivation: drv_path.to_string(), + }, + ); + } + + /// Checks whether there are any known paths. If not, a reference + /// scanner can not be created. + pub fn is_empty(&self) -> bool { + self.paths.is_empty() + } + + /// Create a reference scanner from the current set of known paths. + pub fn reference_scanner(&self) -> ReferenceScanner<PathName> { + let candidates = self.paths.keys().map(Clone::clone).collect(); + ReferenceScanner::new(candidates) + } + + /// Fetch the opaque "hash derivation modulo" for a given derivation path. + pub fn get_hash_derivation_modulo(&self, drv_path: &str) -> NixHash { + // TODO: we rely on an invariant that things *should* have + // been calculated if we get this far. 
+ self.derivation_or_fod_hashes[drv_path].clone() + } + + pub fn add_hash_derivation_modulo<D: ToString>( + &mut self, + drv: D, + hash_derivation_modulo: &NixHash, + ) { + #[allow(unused_variables)] // assertions on this only compiled in debug builds + let old = self + .derivation_or_fod_hashes + .insert(drv.to_string(), hash_derivation_modulo.to_owned()); + + #[cfg(debug_assertions)] + { + if let Some(old) = old { + debug_assert!( + old == *hash_derivation_modulo, + "hash derivation modulo for a given derivation should always be calculated the same" + ); + } + } + } +} diff --git a/tvix/cli/src/main.rs b/tvix/cli/src/main.rs new file mode 100644 index 000000000000..28b75dd907aa --- /dev/null +++ b/tvix/cli/src/main.rs @@ -0,0 +1,284 @@ +mod derivation; +mod errors; +mod known_paths; +mod nix_compat; +mod refscan; +mod tvix_io; + +use std::cell::RefCell; +use std::rc::Rc; +use std::{fs, path::PathBuf}; + +use clap::Parser; +use known_paths::KnownPaths; +use rustyline::{error::ReadlineError, Editor}; +use tvix_eval::observer::{DisassemblingObserver, TracingObserver}; +use tvix_eval::{Builtin, Value}; +use tvix_store::blobservice::MemoryBlobService; +use tvix_store::directoryservice::MemoryDirectoryService; +use tvix_store::pathinfoservice::MemoryPathInfoService; + +#[derive(Parser)] +struct Args { + /// Path to a script to evaluate + script: Option<PathBuf>, + + #[clap(long, short = 'E')] + expr: Option<String>, + + /// Dump the raw AST to stdout before interpreting + #[clap(long, env = "TVIX_DISPLAY_AST")] + display_ast: bool, + + /// Dump the bytecode to stdout before evaluating + #[clap(long, env = "TVIX_DUMP_BYTECODE")] + dump_bytecode: bool, + + /// Trace the runtime of the VM + #[clap(long, env = "TVIX_TRACE_RUNTIME")] + trace_runtime: bool, + + /// Only compile, but do not execute code. This will make Tvix act + /// sort of like a linter. + #[clap(long)] + compile_only: bool, + + /// Don't print warnings. + #[clap(long)] + no_warnings: bool, + + /// A colon-separated list of directories to use to resolve `<...>`-style paths + #[clap(long, short = 'I', env = "NIX_PATH")] + nix_search_path: Option<String>, + + /// Print "raw" (unquoted) output. + #[clap(long)] + raw: bool, + + /// Strictly evaluate values, traversing them and forcing e.g. + /// elements of lists and attribute sets before printing the + /// return value. + #[clap(long)] + strict: bool, +} + +/// Interprets the given code snippet, printing out warnings, errors +/// and the result itself. The return value indicates whether +/// evaluation succeeded. +fn interpret(code: &str, path: Option<PathBuf>, args: &Args, explain: bool) -> bool { + let mut eval = tvix_eval::Evaluation::new_impure(code, path); + let known_paths: Rc<RefCell<KnownPaths>> = Default::default(); + + eval.strict = args.strict; + + let blob_service = MemoryBlobService::default(); + let directory_service = MemoryDirectoryService::default(); + let path_info_service = MemoryPathInfoService::default(); + let nar_calculation_service = tvix_store::nar::NonCachingNARCalculationService::new( + blob_service.clone(), + directory_service.clone(), + ); + + eval.io_handle = Box::new(tvix_io::TvixIO::new( + known_paths.clone(), + tvix_store::TvixStoreIO::new( + blob_service, + directory_service, + path_info_service, + nar_calculation_service, + ), + )); + + // bundle fetchurl.nix (used in nixpkgs) by resolving <nix> to + // `/__corepkgs__`, which has special handling in [`nix_compat`]. 
+ eval.nix_path = args + .nix_search_path + .as_ref() + .map(|p| format!("nix=/__corepkgs__:{}", p)) + .or_else(|| Some("nix=/__corepkgs__".to_string())); + + eval.builtins + .extend(derivation::derivation_builtins(known_paths)); + + // Add the actual `builtins.derivation` from compiled Nix code + eval.src_builtins + .push(("derivation", include_str!("derivation.nix"))); + + let source_map = eval.source_map(); + let result = { + let mut compiler_observer = + DisassemblingObserver::new(source_map.clone(), std::io::stderr()); + if args.dump_bytecode { + eval.compiler_observer = Some(&mut compiler_observer); + } + + let mut runtime_observer = TracingObserver::new(std::io::stderr()); + if args.trace_runtime { + eval.runtime_observer = Some(&mut runtime_observer); + } + + eval.evaluate() + }; + + if args.display_ast { + if let Some(ref expr) = result.expr { + eprintln!("AST: {}", tvix_eval::pretty_print_expr(expr)); + } + } + + for error in &result.errors { + error.fancy_format_stderr(&source_map); + } + + if !args.no_warnings { + for warning in &result.warnings { + warning.fancy_format_stderr(&source_map); + } + } + + if let Some(value) = result.value.as_ref() { + if explain { + println!("=> {}", value.explain()); + } else { + println_result(value, args.raw); + } + } + + // inform the caller about any errors + result.errors.is_empty() +} + +/// Interpret the given code snippet, but only run the Tvix compiler +/// on it and return errors and warnings. +fn lint(code: &str, path: Option<PathBuf>, args: &Args) -> bool { + let mut eval = tvix_eval::Evaluation::new_impure(code, path); + eval.strict = args.strict; + + let source_map = eval.source_map(); + + let mut compiler_observer = DisassemblingObserver::new(source_map.clone(), std::io::stderr()); + + if args.dump_bytecode { + eval.compiler_observer = Some(&mut compiler_observer); + } + + if args.trace_runtime { + eprintln!("warning: --trace-runtime has no effect with --compile-only!"); + } + + let result = eval.compile_only(); + + if args.display_ast { + if let Some(ref expr) = result.expr { + eprintln!("AST: {}", tvix_eval::pretty_print_expr(expr)); + } + } + + for error in &result.errors { + error.fancy_format_stderr(&source_map); + } + + for warning in &result.warnings { + warning.fancy_format_stderr(&source_map); + } + + // inform the caller about any errors + result.errors.is_empty() +} + +fn main() { + let args = Args::parse(); + + if let Some(file) = &args.script { + run_file(file.clone(), &args) + } else if let Some(expr) = &args.expr { + if !interpret(expr, None, &args, false) { + std::process::exit(1); + } + } else { + run_prompt(&args) + } +} + +fn run_file(mut path: PathBuf, args: &Args) { + if path.is_dir() { + path.push("default.nix"); + } + let contents = fs::read_to_string(&path).expect("failed to read the input file"); + + let success = if args.compile_only { + lint(&contents, Some(path), args) + } else { + interpret(&contents, Some(path), args, false) + }; + + if !success { + std::process::exit(1); + } +} + +fn println_result(result: &Value, raw: bool) { + if raw { + println!("{}", result.to_str().unwrap().as_str()) + } else { + println!("=> {} :: {}", result, result.type_of()) + } +} + +fn state_dir() -> Option<PathBuf> { + let mut path = dirs::data_dir(); + if let Some(p) = path.as_mut() { + p.push("tvix") + } + path +} + +fn run_prompt(args: &Args) { + let mut rl = Editor::<()>::new().expect("should be able to launch rustyline"); + + if args.compile_only { + eprintln!("warning: `--compile-only` has no effect on REPL usage!"); 
+ } + + let history_path = match state_dir() { + // Attempt to set up these paths, but do not hard fail if it + // doesn't work. + Some(mut path) => { + let _ = std::fs::create_dir_all(&path); + path.push("history.txt"); + let _ = rl.load_history(&path); + Some(path) + } + + None => None, + }; + + loop { + let readline = rl.readline("tvix-repl> "); + match readline { + Ok(line) => { + if line.is_empty() { + continue; + } + + rl.add_history_entry(&line); + + if let Some(without_prefix) = line.strip_prefix(":d ") { + interpret(without_prefix, None, args, true); + } else { + interpret(&line, None, args, false); + } + } + Err(ReadlineError::Interrupted) | Err(ReadlineError::Eof) => break, + + Err(err) => { + eprintln!("error: {}", err); + break; + } + } + } + + if let Some(path) = history_path { + rl.save_history(&path).unwrap(); + } +} diff --git a/tvix/cli/src/nix_compat.rs b/tvix/cli/src/nix_compat.rs new file mode 100644 index 000000000000..bda238983839 --- /dev/null +++ b/tvix/cli/src/nix_compat.rs @@ -0,0 +1,122 @@ +//! This module implements (temporary) compatibility shims between +//! Tvix and C++ Nix. +//! +//! These are not intended to be long-lived, but should bootstrap Tvix +//! by piggybacking off functionality that already exists in Nix and +//! is still being implemented in Tvix. + +use std::collections::HashMap; +use std::path::Path; +use std::process::Command; +use std::sync::RwLock; +use std::{io, path::PathBuf}; + +use smol_str::SmolStr; +use tvix_eval::{EvalIO, FileType, StdIO}; + +/// Compatibility implementation of [`EvalIO`] that uses C++ Nix to +/// write files to the Nix store. +pub struct NixCompatIO { + /// Most IO requests are tunneled through to [`tvix_eval::StdIO`] + /// instead. + underlying: StdIO, + + /// Cache paths for identical files being imported to the store. + // TODO(tazjin): This could be done better by having a thunk cache + // for these calls on the eval side, but that is a little more + // complex. + import_cache: RwLock<HashMap<PathBuf, PathBuf>>, +} + +impl EvalIO for NixCompatIO { + fn store_dir(&self) -> Option<String> { + Some("/nix/store".into()) + } + + // Pass path imports through to `nix-store --add` + fn import_path(&self, path: &Path) -> Result<PathBuf, io::Error> { + let path = path.to_owned(); + if let Some(path) = self + .import_cache + .read() + .map_err(|e| io::Error::new(io::ErrorKind::Other, e.to_string()))? + .get(&path) + { + return Ok(path.to_path_buf()); + } + + let store_path = self.add_to_store(&path)?; + + self.import_cache + .write() + .map_err(|e| io::Error::new(io::ErrorKind::Other, e.to_string()))? + .insert(path, store_path.clone()); + + Ok(store_path) + } + + // Pass the rest of the functions through to `Self::underlying` + fn path_exists(&self, path: &Path) -> Result<bool, io::Error> { + if path.starts_with("/__corepkgs__") { + return Ok(true); + } + + self.underlying.path_exists(path) + } + + fn read_to_string(&self, path: &Path) -> Result<String, io::Error> { + // Bundled version of corepkgs/fetchurl.nix. This workaround + // is similar to what cppnix does for passing the path + // through. + // + // TODO: this comparison is bad and allocates, we should use + // the sane path library. 
+ if path.starts_with("/__corepkgs__/fetchurl.nix") { + return Ok(include_str!("fetchurl.nix").to_string()); + } + + self.underlying.read_to_string(path) + } + + fn read_dir(&self, path: &Path) -> Result<Vec<(SmolStr, FileType)>, io::Error> { + self.underlying.read_dir(path) + } +} + +impl NixCompatIO { + pub fn new() -> Self { + NixCompatIO { + underlying: StdIO, + import_cache: RwLock::new(HashMap::new()), + } + } + + /// Add a path to the Nix store using the `nix-store --add` + /// functionality from C++ Nix. + fn add_to_store(&self, path: &Path) -> Result<PathBuf, io::Error> { + if !path.try_exists()? { + return Err(io::Error::from(io::ErrorKind::NotFound)); + } + + let mut cmd = Command::new("nix-store"); + cmd.arg("--add"); + cmd.arg(path); + + let out = cmd.output()?; + + if !out.status.success() { + return Err(io::Error::new( + io::ErrorKind::Other, + String::from_utf8_lossy(&out.stderr).trim().to_owned(), + )); + } + + let out_path_str = String::from_utf8(out.stdout) + .map_err(|err| io::Error::new(io::ErrorKind::InvalidData, err))?; + let out_path_trimmed = out_path_str.trim(); + + let mut out_path = PathBuf::new(); + out_path.push(out_path_trimmed); + Ok(out_path) + } +} diff --git a/tvix/cli/src/refscan.rs b/tvix/cli/src/refscan.rs new file mode 100644 index 000000000000..739cf77ed26f --- /dev/null +++ b/tvix/cli/src/refscan.rs @@ -0,0 +1,115 @@ +//! Simple scanner for non-overlapping, known references of Nix store paths in a +//! given string. +//! +//! This is used for determining build references (see +//! //tvix/eval/docs/build-references.md for more details). +//! +//! The scanner itself is an Aho-Corasick automaton, using the `aho-corasick` +//! crate. + +use std::collections::BTreeSet; +use wu_manber::TwoByteWM; + +pub const STORE_PATH_LEN: usize = "/nix/store/00000000000000000000000000000000".len(); + +/// Represents a "primed" reference scanner with an automaton that knows the set +/// of store paths to scan for. +pub struct ReferenceScanner<P: Ord + AsRef<[u8]>> { + candidates: Vec<P>, + searcher: Option<TwoByteWM>, + matches: Vec<usize>, +} + +impl<P: Clone + Ord + AsRef<[u8]>> ReferenceScanner<P> { + /// Construct a new `ReferenceScanner` that knows how to scan for the given + /// candidate store paths. + pub fn new(candidates: Vec<P>) -> Self { + let searcher = if candidates.is_empty() { + None + } else { + Some(TwoByteWM::new(&candidates)) + }; + + ReferenceScanner { + searcher, + candidates, + matches: Default::default(), + } + } + + /// Scan the given string for all non-overlapping matches and collect them + /// in the scanner. + pub fn scan_str(&mut self, haystack: &str) { + if haystack.len() < STORE_PATH_LEN { + return; + } + + if let Some(searcher) = &self.searcher { + for m in searcher.find(&haystack) { + self.matches.push(m.pat_idx); + } + } + } + + /// Finalise the reference scanner and return the resulting matches. + pub fn finalise(self) -> BTreeSet<P> { + self.matches + .into_iter() + .map(|idx| self.candidates[idx].clone()) + .collect() + } +} + +#[cfg(test)] +mod tests { + use super::*; + + // The actual derivation of `nixpkgs.hello`. 
+ const HELLO_DRV: &'static str = r#"Derive([("out","/nix/store/33l4p0pn0mybmqzaxfkpppyh7vx1c74p-hello-2.12.1","","")],[("/nix/store/6z1jfnqqgyqr221zgbpm30v91yfj3r45-bash-5.1-p16.drv",["out"]),("/nix/store/ap9g09fxbicj836zm88d56dn3ff4clxl-stdenv-linux.drv",["out"]),("/nix/store/pf80kikyxr63wrw56k00i1kw6ba76qik-hello-2.12.1.tar.gz.drv",["out"])],["/nix/store/9krlzvny65gdc8s7kpb6lkx8cd02c25b-default-builder.sh"],"x86_64-linux","/nix/store/4xw8n979xpivdc46a9ndcvyhwgif00hz-bash-5.1-p16/bin/bash",["-e","/nix/store/9krlzvny65gdc8s7kpb6lkx8cd02c25b-default-builder.sh"],[("buildInputs",""),("builder","/nix/store/4xw8n979xpivdc46a9ndcvyhwgif00hz-bash-5.1-p16/bin/bash"),("cmakeFlags",""),("configureFlags",""),("depsBuildBuild",""),("depsBuildBuildPropagated",""),("depsBuildTarget",""),("depsBuildTargetPropagated",""),("depsHostHost",""),("depsHostHostPropagated",""),("depsTargetTarget",""),("depsTargetTargetPropagated",""),("doCheck","1"),("doInstallCheck",""),("mesonFlags",""),("name","hello-2.12.1"),("nativeBuildInputs",""),("out","/nix/store/33l4p0pn0mybmqzaxfkpppyh7vx1c74p-hello-2.12.1"),("outputs","out"),("patches",""),("pname","hello"),("propagatedBuildInputs",""),("propagatedNativeBuildInputs",""),("src","/nix/store/pa10z4ngm0g83kx9mssrqzz30s84vq7k-hello-2.12.1.tar.gz"),("stdenv","/nix/store/cp65c8nk29qq5cl1wyy5qyw103cwmax7-stdenv-linux"),("strictDeps",""),("system","x86_64-linux"),("version","2.12.1")])"#; + + #[test] + fn test_no_patterns() { + let mut scanner: ReferenceScanner<String> = ReferenceScanner::new(vec![]); + + scanner.scan_str(HELLO_DRV); + + let result = scanner.finalise(); + + assert_eq!(result.len(), 0); + } + + #[test] + fn test_single_match() { + let mut scanner = ReferenceScanner::new(vec![ + "/nix/store/4xw8n979xpivdc46a9ndcvyhwgif00hz-bash-5.1-p16".to_string(), + ]); + scanner.scan_str(HELLO_DRV); + + let result = scanner.finalise(); + + assert_eq!(result.len(), 1); + assert!(result.contains("/nix/store/4xw8n979xpivdc46a9ndcvyhwgif00hz-bash-5.1-p16")); + } + + #[test] + fn test_multiple_matches() { + let candidates = vec![ + // these exist in the drv: + "/nix/store/33l4p0pn0mybmqzaxfkpppyh7vx1c74p-hello-2.12.1".to_string(), + "/nix/store/pf80kikyxr63wrw56k00i1kw6ba76qik-hello-2.12.1.tar.gz.drv".to_string(), + "/nix/store/cp65c8nk29qq5cl1wyy5qyw103cwmax7-stdenv-linux".to_string(), + // this doesn't: + "/nix/store/fn7zvafq26f0c8b17brs7s95s10ibfzs-emacs-28.2.drv".to_string(), + ]; + + let mut scanner = ReferenceScanner::new(candidates.clone()); + scanner.scan_str(HELLO_DRV); + + let result = scanner.finalise(); + assert_eq!(result.len(), 3); + + for c in candidates[..3].iter() { + assert!(result.contains(c)); + } + } +} diff --git a/tvix/cli/src/tvix_io.rs b/tvix/cli/src/tvix_io.rs new file mode 100644 index 000000000000..8ca660f87b05 --- /dev/null +++ b/tvix/cli/src/tvix_io.rs @@ -0,0 +1,79 @@ +//! This module implements a wrapper around tvix-eval's [EvalIO] type, +//! adding functionality which is required by tvix-cli: +//! +//! 1. Marking plain paths known to the reference scanner. +//! 2. Handling the C++ Nix `__corepkgs__`-hack for nixpkgs bootstrapping. +//! +//! All uses of [EvalIO] in tvix-cli must make use of this wrapper, +//! otherwise fundamental features like nixpkgs bootstrapping and hash +//! calculation will not work. 
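A hedged sketch of point 1 above (the local file name is invented): interpolating a path into a derivation attribute makes the evaluator import it into the store through this wrapper, which records the resulting store path in `KnownPaths`, so that the later reference scan over the derivation's arguments and environment can pick it up as a build input.

```nix
derivation {
  name = "uses-a-source-file";
  system = "x86_64-linux";
  builder = "/bin/sh";
  # ./data.txt is a hypothetical local file: the interpolation triggers
  # import_path, and the imported store path is then found again by the
  # reference scanner and ends up in the derivation's input sources.
  args = [ "-c" "cp ${./data.txt} $out" ];
}
```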
+ +use crate::KnownPaths; +use smol_str::SmolStr; +use std::cell::RefCell; +use std::io; +use std::path::{Path, PathBuf}; +use std::rc::Rc; +use tvix_eval::{EvalIO, FileType}; + +pub(crate) struct TvixIO<T: EvalIO> { + /// Ingested paths must be reported to this known paths tracker + /// for accurate build reference scanning. + known_paths: Rc<RefCell<KnownPaths>>, + + // Actual underlying [EvalIO] implementation. + actual: T, +} + +impl<T: EvalIO> TvixIO<T> { + pub(crate) fn new(known_paths: Rc<RefCell<KnownPaths>>, actual: T) -> Self { + Self { + known_paths, + actual, + } + } +} + +impl<T: EvalIO> EvalIO for TvixIO<T> { + fn store_dir(&self) -> Option<String> { + self.actual.store_dir() + } + + fn import_path(&self, path: &Path) -> Result<PathBuf, io::Error> { + let imported_path = self.actual.import_path(path)?; + self.known_paths + .borrow_mut() + .plain(imported_path.to_string_lossy()); + + Ok(imported_path) + } + + fn path_exists(&self, path: &Path) -> Result<bool, io::Error> { + if path.starts_with("/__corepkgs__") { + return Ok(true); + } + + self.actual.path_exists(path) + } + + fn read_to_string(&self, path: &Path) -> Result<String, io::Error> { + // Bundled version of corepkgs/fetchurl.nix. The counterpart + // of this happens in `main`, where the `nix_path` of the + // evaluation has `nix=/__corepkgs__` added to it. + // + // This workaround is similar to what cppnix does for passing + // the path through. + // + // TODO: this comparison is bad and allocates, we should use + // the sane path library. + if path.starts_with("/__corepkgs__/fetchurl.nix") { + return Ok(include_str!("fetchurl.nix").to_string()); + } + + self.actual.read_to_string(path) + } + + fn read_dir(&self, path: &Path) -> Result<Vec<(SmolStr, FileType)>, io::Error> { + self.actual.read_dir(path) + } +} diff --git a/tvix/crate-hashes.json b/tvix/crate-hashes.json new file mode 100644 index 000000000000..80c1718ca1c1 --- /dev/null +++ b/tvix/crate-hashes.json @@ -0,0 +1,5 @@ +{ + "test-generator 0.3.0 (git+https://github.com/JamesGuthrie/test-generator.git?rev=82e799979980962aec1aa324ec6e0e4cad781f41#82e799979980962aec1aa324ec6e0e4cad781f41)": "08brp3qqa55hijc7xby3lam2cc84hvx1zzfqv6lj7smlczh8k32y", + "tonic-mock 0.1.0 (git+https://github.com/brainrake/tonic-mock?branch=bump-dependencies#ec1a15510875de99d709d684190db5d9beab175e)": "0lwa03hpp0mxa6aa1zv5w68k61y4hccfm0q2ykyq392fwal8vb50", + "wu-manber 0.1.0 (git+https://github.com/tvlfyi/wu-manber.git#e77628cafcf45d41b9e455be86a6b1b9f46b2092)": "02byhfiw41mlgr1c43n2iq6jw5sbyn8l1acv5v71a07h5l18q0cy" +} \ No newline at end of file diff --git a/tvix/default.nix b/tvix/default.nix new file mode 100644 index 000000000000..ef0e2386db0b --- /dev/null +++ b/tvix/default.nix @@ -0,0 +1,90 @@ +# Nix helpers for projects under //tvix +{ pkgs, depot, ... }: + +let + # crate override for crates that need protobuf + protobufDep = prev: (prev.nativeBuildInputs or [ ]) ++ [ pkgs.protobuf ]; + + # Cargo dependencies to be used with nixpkgs rustPlatform functions. + cargoDeps = pkgs.rustPlatform.importCargoLock { + lockFile = ./Cargo.lock; + outputHashes = { + "test-generator-0.3.0" = "08brp3qqa55hijc7xby3lam2cc84hvx1zzfqv6lj7smlczh8k32y"; + "tonic-mock-0.1.0" = "0lwa03hpp0mxa6aa1zv5w68k61y4hccfm0q2ykyq392fwal8vb50"; + "wu-manber-0.1.0" = "02byhfiw41mlgr1c43n2iq6jw5sbyn8l1acv5v71a07h5l18q0cy"; + }; + }; +in +{ + # Load the crate2nix crate tree. 
+ crates = import ./Cargo.nix { + inherit pkgs; + nixpkgs = pkgs.path; + + defaultCrateOverrides = pkgs.defaultCrateOverrides // { + prost-build = prev: { + nativeBuildInputs = protobufDep prev; + }; + + tonic-reflection = prev: { + nativeBuildInputs = protobufDep prev; + }; + + tvix-store = prev: { + PROTO_ROOT = depot.tvix.store.protos; + nativeBuildInputs = protobufDep prev; + }; + }; + }; + + # Run crate2nix generate in the current working directory, then + # format the generated file with depotfmt. + crate2nixGenerate = pkgs.writeShellScriptBin "crate2nix-generate" '' + ${pkgs.crate2nix}/bin/crate2nix generate + ${depot.tools.depotfmt}/bin/depotfmt Cargo.nix + ''; + + # Provide a shell for the combined dependencies of all Tvix Rust + # projects. Note that as this is manually maintained it may be + # lacking something, but it is required for some people's workflows. + # + # This shell can be entered with e.g. `mg shell //tvix:shell`. + shell = pkgs.mkShell { + name = "tvix-rust-dev-env"; + packages = [ + pkgs.buf-language-server + pkgs.cargo + pkgs.clippy + pkgs.evans + pkgs.rust-analyzer + pkgs.rustc + pkgs.rustfmt + pkgs.protobuf + ]; + }; + + # Build the Rust documentation for publishing on docs.tvix.dev. Currently only + # some crates are documented, as the crates that depend on Protobuf cause + # build failures. + rust-docs = pkgs.stdenv.mkDerivation { + inherit cargoDeps; + name = "tvix-rust-docs"; + src = depot.third_party.gitignoreSource ./.; + PROTO_ROOT = depot.tvix.store.protos; + + nativeBuildInputs = with pkgs; [ + cargo + rust-analyzer + rustPlatform.cargoSetupHook + rustc + protobuf + ]; + + buildPhase = '' + cargo doc --document-private-items + mv target/doc $out + ''; + }; + + meta.ci.targets = [ "shell" "rust-docs" ]; +} diff --git a/tvix/docs/.gitignore b/tvix/docs/.gitignore new file mode 100644 index 000000000000..77699ee8a3f7 --- /dev/null +++ b/tvix/docs/.gitignore @@ -0,0 +1,2 @@ +*.svg +*.html diff --git a/tvix/docs/Makefile b/tvix/docs/Makefile new file mode 100644 index 000000000000..ba9e2bdef6d3 --- /dev/null +++ b/tvix/docs/Makefile @@ -0,0 +1,12 @@ +all: build + +puml: + plantuml *.puml -tsvg + +html: + pandoc *.md -f markdown --self-contained -t html -s -o tvix.html --csl=${CSL} + +build: puml html + +clean: + rm -f *.tex *.pdf *.png *.svg diff --git a/tvix/docs/component-flow.puml b/tvix/docs/component-flow.puml new file mode 100644 index 000000000000..5b6d79b82313 --- /dev/null +++ b/tvix/docs/component-flow.puml @@ -0,0 +1,60 @@ +@startuml + +title Tvix build flow + +actor User +participant CLI +participant "Coordinator" as Coord +participant "Evaluator" as Eval +database Store +participant "Builder" as Build + +note over CLI,Eval + Typically runs locally on the invoking machine +end note +/ note over Store, Build + Can be either local or remote +end note + +User-->CLI: User initiates build of `hello` (analogous to `nix-build -f '<nixpkgs>' -A hello`) + +CLI-->Coord: CLI invokes coordinator + +Coord-->Eval: Sends message to start evaluation of `<nixpkgs>` (path lookup) with attribute `hello` +note right: The paths to the evaluator are local file system paths + +Coord<--Eval: Yields derivations to be built +note right + Immediately starts streaming derivations as they are instantiated across + the dependency graph so they can be built while the evaluation is still running. + + There are two types of build requests: One for regular "fire and forget" builds, + and another for IFD (import from derivation). 
+ + These are distinct because IFD needs to be fed back into the evaluator for + further processing while a regular build does not. +end note + +loop while has more derivations + + Coord-->Store: Check if desired paths are in store + alt Store has path + Coord<--Store: Success response + else Store does not have path + Coord-->Build: Request derivation to be built + + alt Build failure + Coord<--Build: Fail response + note left: It's up to the coordinator whether to exit on build failure + else Build success + Build-->Store: Push outputs to store + Build<--Coord: Send success & pushed response + end + + end +end + +CLI<--Coord: Respond success/fail +User<--CLI: Exit success/fail + +@enduml diff --git a/tvix/docs/components.md b/tvix/docs/components.md new file mode 100644 index 000000000000..a7d61948c2fa --- /dev/null +++ b/tvix/docs/components.md @@ -0,0 +1,160 @@ +--- +title: "Tvix - Architecture & data flow" +numbersections: true +author: +- adisbladis +- flokli +- tazjin +email: +- adis@blad.is +- mail@tazj.in +lang: en-GB +classoption: +- twocolumn +header-includes: +- \usepackage{caption, graphicx, tikz, aeguill, pdflscape} +--- + +# Background + +We intend for Tvix tooling to be more decoupled than the existing, +monolithic Nix implementation. In practice, we expect to gain several +benefits from this, such as: + +- Ability to use different builders +- Ability to use different store implementations +- No monopolisation of the implementation, allowing users to replace + components that they are unhappy with (up to and including the + language evaluator) +- Less hidden intra-dependencies between tools due to explicit RPC/IPC + boundaries + +Communication between different components of the system will use +gRPC. The rest of this document outlines the components. + +# Components + +## Coordinator + +*Purpose:* The coordinator (in the simplest case, the Tvix CLI tool) +oversees the flow of a build process and delegates tasks to the right +subcomponents. For example, if a user runs the equivalent of +`nix-build` in a folder containing a `default.nix` file, the +coordinator will invoke the evaluator, pass the resulting derivations +to the builder and coordinate any necessary store interactions (for +substitution and other purposes). + +While many users are likely to use the CLI tool as their primary +method of interacting with Tvix, it is not unlikely that alternative +coordinators (e.g. for a distributed, "Nix-native" CI system) would be +implemented. To facilitate this, we are considering implementing the +coordinator on top of a state-machine model that would make it +possible to reuse the FSM logic without tying it to any particular +kind of application. + +## Evaluator + +*Purpose:* Eval takes care of evaluating Nix code. In a typical build +flow it would be responsible for producing derivations. It can also be +used as a standalone tool, for example, in use-cases where Nix is used +to generate configuration without any build or store involvement. + +*Requirements:* For now, it will run on the machine invoking the build +command itself. We give it filesystem access to handle things like +imports or `builtins.readFile`. + +To support IFD, the Evaluator also needs access to store paths. This +could be implemented by having the coordinator provide an interface to retrieve +files from a store path, or by ensuring a "realized version of the store" is +accessible by the evaluator (this could be a FUSE filesystem, or the "real" +/nix/store on disk. 
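As a hedged illustration of the IFD case discussed above (names and the builder invocation are made up): evaluation of the outer expression cannot proceed until the inner derivation has been built and its output read back, which is exactly why the evaluator needs some way of accessing realised store paths.

```nix
let
  # A derivation whose build output is itself a Nix expression.
  generated = derivation {
    name = "generated-expr";
    system = "x86_64-linux";
    builder = "/bin/sh";
    args = [ "-c" "echo '{ greeting = \"hello\"; }' > $out" ];
  };
in
# Importing the output forces a build in the middle of evaluation (IFD).
(import generated).greeting
```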
+ +We might be okay with running the evaluator with filesystem access for now and +can extend the interface if the need arises. + +## Builder + +*Purpose:* A builder receives derivations from the coordinator and +builds them. + +By making builder a standardised interface it's possible to make the +sandboxing mechanism used by the build process pluggable. + +Nix is currently using a hard-coded +[libseccomp](https://github.com/seccomp/libseccomp) based sandboxing +mechanism and another one based on +[sandboxd](https://www.unix.com/man-page/mojave/8/sandboxd/) on macOS. +These are only separated by [compiler preprocessor +macros](https://gcc.gnu.org/onlinedocs/cpp/Ifdef.html) within the same +source files despite having very little in common with each other. + +This makes experimentation with alternative backends difficult and +porting Nix to other platforms harder than it has to be. We want to +write a new Linux builder which uses +[OCI](https://github.com/opencontainers/runtime-spec), the current +dominant Linux containerisation technology, by default. + +With a well-defined builder abstraction, it's also easy to imagine +other backends such as a Kubernetes-based one in the future. + +The environment in which builds happen is currently very Nix-specific. We might +want to avoid having to maintain all the intricacies of a Nix-specific +sandboxing environment in every builder, and instead only provide a more +generic interface, receiving build requests (and have the coordinator translate +derivations to that format). [^1] + +To build, the builder needs to be able to mount all build inputs into the build +environment. For this, it needs the store to expose a filesystem interface. + +## Store + +*Purpose:* Store takes care of storing build results. It provides a +unified interface to get store paths and upload new ones, as well as querying +for the existence of a store path and its metadata (references, signatures, …). + +Tvix natively uses an improved store protocol. Instead of transferring around +NAR files, which don't provide an index and don't allow seekable access, a +concept similar to git tree hashing is used. + +This allows more granular substitution, chunk reusage and parallel download of +individual files, reducing bandwidth usage. +As these chunks are content-addressed, it opens up the potential for +peer-to-peer trustless substitution of most of the data, as long as we sign the +root of the index. + +Tvix still keeps the old-style signatures, NAR hashes and NAR size around. In +the case of NAR hash / NAR size, this data is strictly required in some cases. +The old-style signatures are valuable for communication with existing +implementations. + +Old-style binary caches (like cache.nixos.org) can still be exposed via the new +interface, by doing on-the-fly (re)chunking/ingestion. + +Most likely, there will be multiple implementations of store, some storing +things locally, some exposing a "remote view". + +A few possible ones that come to mind are: + +- Local store +- SFTP/ GCP / S3 / HTTP +- NAR/NARInfo protocol: HTTP, S3 + +A remote Tvix store can be connected by simply connecting to its gRPC +interface, possibly using SSH tunneling, but there doesn't need to be an +additional "wire format" like the Nix `ssh(+ng)://` protocol. + +Settling on one interface allows composition of stores, meaning it becomes +possible to express substitution from remote caches as a proxy layer. 
+ +It'd also be possible to write a FUSE implementation on top of the RPC +interface, exposing a lazily-substituting /nix/store mountpoint. Using this in +remote build context dramatically reduces the amount of data transferred to a +builder, as only the files really accessed during the build are substituted. + +# Figures + +![component flow](./component-flow.svg) + +[^1]: There have already been some discussions in the Nix community, to switch + to REAPI: + https://discourse.nixos.org/t/a-proposal-for-replacing-the-nix-worker-protocol/20926/22 diff --git a/tvix/docs/default.nix b/tvix/docs/default.nix new file mode 100644 index 000000000000..016d641df59f --- /dev/null +++ b/tvix/docs/default.nix @@ -0,0 +1,47 @@ +{ pkgs, lib, ... }: + +let + + tl = pkgs.texlive.combine { + inherit (pkgs.texlive) scheme-medium wrapfig ulem capt-of + titlesec preprint enumitem paralist ctex environ svg + beamer trimspaces zhnumber changepage framed pdfpages + fvextra minted upquote ifplatform xstring; + }; + + csl = pkgs.fetchurl { + name = "numeric.csl"; + url = "https://gist.githubusercontent.com/bwiernik/8c6f39cf51ceb3a03107/raw/1d75c2d62113ffbba6ed03a47ad99bde86934f2b/APA%2520Numeric"; + sha256 = "1yfhhnhbzvhrv93baz98frmgsx5y442nzhb0l956l4j35fb0cc3h"; + }; + +in +pkgs.stdenv.mkDerivation { + pname = "tvix-doc"; + version = "0.1"; + + outputs = [ "out" "svg" ]; + + src = lib.cleanSource ./.; + + CSL = csl; + + nativeBuildInputs = [ + pkgs.pandoc + pkgs.plantuml + tl + ]; + + installPhase = '' + runHook preInstall + + mkdir -p $out + cp -v *.html $out/ + + mkdir -p $svg + cp -v *.svg $svg/ + + runHook postSubmit + ''; + +} diff --git a/tvix/docs/lang-version.md b/tvix/docs/lang-version.md new file mode 100644 index 000000000000..c09422a5897a --- /dev/null +++ b/tvix/docs/lang-version.md @@ -0,0 +1,60 @@ +# Nix language version history + +The Nix language (“Nix”) has its own versioning mechanism independent from its +most popular implementation (“C++ Nix”): `builtins.langVersion`. It has been +increased whenever the language has changed syntactically or semantically in a +way that would not be introspectable otherwise. In particular, this does not +include addition (or removal) of `builtins`, as this can be introspected using +standard attribute set operations. + +Changes to `builtins.langVersion` are best found by viewing the git history of +C++ Nix using `git log -G 'mkInt\\(v, [0-9]\\)'` for `builtins.langVersion` < 7. +After that point `git log -G 'v\\.mkInt\\([0-9]+\\)'` should work. To reduce the +amount of false positives, specify the version number you are interested in +explicitly. + +## 1 + +The first version of the Nix language is its state at the point when +`builtins.langVersion` was added in [8b8ee53] which was first released +as part of C++ Nix 1.2. + +## 2 + +Nix version 2 changed the behavior of `builtins.storePath`: It would now [try to +substitute the given path if missing][storePath-substitute], instead of creating +an evaluation failure. `builtins.langVersion` was increased in [e36229d]. + +## 3 + +Nix version 3 changed the behavior of the `==` behavior. Strings would now be +considered [equal even if they had differing string context][equal-no-ctx]. + +## 4 + +Nix version 4 [added the float type][float] to the language. + +## 5 + +The [increase of `builtins.langVersion` to 5][langVersion-5] did not signify a +language change, but added support for structured attributes to the Nix daemon. +Eelco Dolstra writes as to what changed: + +> The structured attributes support. 
Unfortunately that's not so much a language +> change as a build.cc (i.e. daemon) change, but we don't really have a way to +> express that... + +Probably `builtins.nixVersion` (which was added in version 1) should have been +used instead. + +## 6 + +Nix version 6 added support for [comparing two lists][list-comparison]. + +[8b8ee53]: https://github.com/nixos/nix/commit/8b8ee53bc73769bb25d967ba259dabc9b23e2e6f +[storePath-substitute]: https://github.com/nixos/nix/commit/22d665019a3770148929b7504c73bcdbe025ec12 +[e36229d]: https://github.com/nixos/nix/commit/e36229d27f9ab508e0abf1892f3e8c263d2f8c58 +[equal-no-ctx]: https://github.com/nixos/nix/commit/ee7fe64c0ac00f2be11604a2a6509eb86dc19f0a +[float]: https://github.com/nixos/nix/commit/14ebde52893263930cdcde1406cc91cc5c42556f +[langVersion-5]: https://github.com/nixos/nix/commit/8191992c83bf4387b03c5fdaba818dc2b520462d +[list-comparison]: https://github.com/nixos/nix/commit/09471d2680292af48b2788108de56a8da755d661 diff --git a/tvix/docs/language-spec.md b/tvix/docs/language-spec.md new file mode 100644 index 000000000000..a71437493307 --- /dev/null +++ b/tvix/docs/language-spec.md @@ -0,0 +1,78 @@ +--- +title: "Specification of the Nix language" +numbersections: true +author: +- tazjin +email: +- tazjin@tvl.su +lang: en-GB +--- + +The Nix Language +================ + +WARNING: This document is a work in progress. Please keep an eye on +[`topic:nix-spec`](https://cl.tvl.fyi/q/topic:nix-spec) for ongoing +CLs. + +Nix is a general-purpose, functional programming language which this +document aims to describe. + +## Background + +Nix was designed and implemented as part of the [Nix package +manager](https://nixos.org/nix). It is primarily used for generating +so-called [*derivations*](#derivations), which are data structures +describing how to build a package. + +The language has been described in the +[thesis](https://edolstra.github.io/pubs/phd-thesis.pdf) introducing +the package manager, but only on a high-level. At the time of writing, +Nix is informally specified (via its only complete implementation in +the package manager) and there is no complete overview over its - +sometimes surprising - semantics. + +The primary project written in Nix is +[nixpkgs](https://github.com/NixOS/nixpkgs/). Uncertainties in the +process of writing this specification are resolved by investigating +patterns in nixpkgs, which we consider canonical. The code in nixpkgs +uses a reasonable subset of the features exposed by the current +implementation, some of which are *accidental*, and is thus more +useful for specifying how the language should work. + +## Introduction to Nix + +Nix is a general-purpose, partially lazy, functional programming +language which provides higher-order functions, type reflection, +primitive data types such as integers, strings and floats, and +compound data structures such as lists and attribute sets. + +Nix has syntactic sugar for common operations, such as those for +attribute sets, and also provides a wide range of built-in functions +which have organically accumulated over time. + +Nix has a variety of legacy features that are not in practical use, +but are documented in sections of this specification for the sake of +completeness. + +This document describes the syntax and abstract semantics of the Nix +language, but leaves out implementation details about how Nix can be +interpreted/compiled/analysed etc. 
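As a brief, non-normative illustration of the features listed above (higher-order functions, primitive and compound values, and some of the attribute set sugar), consider the following expression. It is an example chosen for this overview, not a definition of any of these constructs:

```nix
let
  double = f: x: f (f x);        # higher-order function
  inc = n: n + 1;
  version = 2.3;                 # float literal
in {
  result = double inc 40;        # => 42
  meta.description = "example";  # nested attribute path (syntactic sugar)
  inherit version;               # sugar for `version = version;`
  values = [ 1 "two" 3.0 ];      # list mixing primitive types
}
```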
+ +### Program structure + +This section describes the semantic structure of Nix, and how it +relates to the rest of the specification. + +Each Nix program is a single [*expression*](#expressions) denoting a +[*value*](#values) (commonly a [*function*](#functions)). Each value +has a [*type*](#types), however this type is not statically known. + +Nix code is modularised through the use of the +[*import*](#builtins-import) built-in function. No separate module +system exists. + +In addition to chapters describing the building blocks mentioned +above, this specificiation also describes the [*syntax*](#syntax), the +available [built-in functions](#builtins), [*error handling*](#errors) +and known [*deficiencies*](#deficiencies) in the language. diff --git a/tvix/docs/value-pointer-equality.md b/tvix/docs/value-pointer-equality.md new file mode 100644 index 000000000000..78b1466f3a8f --- /dev/null +++ b/tvix/docs/value-pointer-equality.md @@ -0,0 +1,202 @@ +# Value Pointer Equality in Nix + +## Introduction + +It is a piece of semi-obscure Nix trivia that while functions are generally not +comparable, they can be compared in certain situations. This is actually quite an +important fact, as it is essential for the evaluation of nixpkgs: The attribute sets +used to represent platforms in nixpkgs, like `stdenv.buildPlatform`, contain functions, +such as `stdenv.buildPlatform.canExecute`. When writing cross logic, one invariably +ends up writing expressions that compare these sets, e.g. `stdenv.buildPlatform != +stdenv.hostPlatform`. Since attribute set equality is the equality of their attribute +names and values, we also end up comparing the functions within them. We can summarize +the relevant part of this behavior for platform comparisons in the following (true) +Nix expressions: + +* `stdenv.hostPlatform.canExecute != stdenv.hostPlatform.canExecute` +* `stdenv.hostPlatform == stdenv.hostPlatform` + +This fact is commonly referred to as pointer equality of functions (or function pointer +equality) which is not an entirely accurate name, as we'll see. This account of the +behavior states that, while functions are incomparable in general, they are comparable +insofar, as they occupy the same spot in an attribute set. + +However, [a maybe lesser known trick][puck-issue] is to write a function such as the +following to allow comparing functions: + +```nix +let + pointerEqual = lhs: rhs: { x = lhs; } == { x = rhs; }; + + f = name: "Hello, my name is ${name}"; + g = name: "Hello, my name is ${name}"; +in +[ + (pointerEqual f f) # => true + (pointerEqual f g) # => false +] +``` + +Here, clearly, the function is not contained at the same position in one and the same +attribute set, but at the same position in two entirely different attribute sets. We can +also see that we are not comparing the functions themselves (e.g. their AST), but +rather if they are the same individual value (i.e. pointer equal). + +So what is _actually_ going on? + +## Nix (pointer) Equality in C++ Nix + +TIP: The summary presented here is up-to-date as of 2022-11-23 and was tested with Nix 2.3 and 2.11. + +The function implementing equality in C++ Nix is `EvalState::eqValues` which starts with +[the following bit of code][eqValues-pointer-eq]: + +```cpp +bool EvalState::eqValues(Value & v1, Value & v2) +{ + forceValue(v1); + forceValue(v2); + + /* !!! Hack to support some old broken code that relies on pointer + equality tests between sets. (Specifically, builderDefs calls + uniqList on a list of sets.) 
Will remove this eventually. */ + if (&v1 == &v2) return true; +``` + +So this immediately looks more like pointer equality of arbitrary *values* instead of functions. In fact +there is [no special code facilitating function equality][eqValues-function-eq]: + +```cpp + /* Functions are incomparable. */ + case nFunction: + return false; +``` + +So one takeaway of this is that pointer equality is neither dependent on functions nor attribute sets. +In fact, we can also write our `pointerEqual` function as: + +```nix +lhs: rhs: [ lhs ] == [ rhs ] +``` + +It's interesting that `EvalState::eqValues` forces the left and right-hand value before trying pointer +equality. It explains that `let x = throw ""; in x == x` does not evaluate successfully, but it is puzzling why +`let f = x: x; in f == f` does not return `true`. In fact, why do we need to wrap the values in a list or +attribute set at all for our `pointerEqual` function to work? + +The answer lies in [the code that evaluates `ExprOpEq`][ExprOpEq], +i.e. an expression involving the `==` operator: + +```cpp +void ExprOpEq::eval(EvalState & state, Env & env, Value & v) +{ + Value v1; e1->eval(state, env, v1); + Value v2; e2->eval(state, env, v2); + v.mkBool(state.eqValues(v1, v2)); +} +``` + +As you can see, two _distinct_ `Value` structs are created, so they can never be pointer equal even +if the `union` inside points to the same bit of memory. We can thus understand what actually happens +when we check the equality of an attribute set (or list), by looking at the following expression: + +```nix +let + x = { name = throw "nameless"; }; +in + +x == x # => causes an evaluation error +``` + +Because `x` can't be pointer equal, as it'll end up in the distinct structs `v1` and `v2`, it needs to be compared +by value. For this reason, the `name` attribute will be forced and an evaluation error caused. +If we rewrite the expression to use… + +```nix +{ inherit x; } == { inherit x; } # => true +``` + +…, it'll work: The two attribute sets are compared by value, but their `x` attribute turns out to be pointer +equal _after_ forcing it. This does not throw, since forcing an attribute set does not force its attributes' +values (as forcing a list doesn't force its elements). + +As we have seen, pointer equality can not only be used to compare function values, but also other +otherwise incomparable values, such as lists and attribute sets that would cause an evaluation +error if they were forced recursively. We can even switch out the `throw` for an `abort`. The limitation is +of course that we need to use a value that behaves differently depending on whether it is forced +“normally” (think `builtins.seq`) or recursively (think `builtins.deepSeq`), so thunks will generally be +evaluated before pointer equality can kick into effect. + +## Summary + +When comparing two Nix values, we must force both of them (non-recursively!), but are +allowed to short-circuit the comparison based on pointer equality, i.e. if they are at +the same exact value in memory, they are deemed equal immediately. This is completely +independent of what type of value they are. If they are not pointer equal, they are +(recursively) compared by value as expected. + +However, when evaluating the Nix expression `a == b`, we *must* invoke our implementation's +value equality function in a way that `a` and `b` themselves can never be deemed pointer equal. +Any values we encounter while recursing during the equality check must be compared by +pointer as described above, though. 
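The following expression, assuming the C++ Nix behaviour described above, condenses both halves of this summary: the operands of `==` are never themselves pointer equal, but values reached while recursing into them can be.

```nix
let
  f = x: x;
in
[
  (f == f)                            # => false, functions are incomparable
  ({ inherit f; } == { inherit f; })  # => true, the `f` attributes are pointer equal
  ([ f ] == [ f ])                    # => true, same effect via lists
]
```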
+ +## Other Comparisons + +The `!=` operator uses `EvalState::eqValues` internally as well, so it behaves exactly as `!(a == b)`. + +The `>`, `<`, `>=` and `<=` operators all desugar to [CompareValues][] eventually +which generally looks at the value type before comparing. It does, however, rely on +`EvalState::eqValues` for list comparisons, so it is possible to compare lists with +e.g. functions in them, as long as they are equal by pointer: + +```nix +let + f = x: x + 42; +in + +[ + ([ f 2 ] > [ f 1 ]) # => true + ([ f 2 ] > [ (x: x) 1]) # => error: cannot compare a function with a function +] +``` + +Finally, since `builtins.elem` relies on `EvalState::eqValues`, you can check for +a function by pointer equality: + +```nix +let + f = x: f x; +in +builtins.elem f [ f 2 3 ] # => true +``` + +## Stability of the Feature + +Keen readers will have noticed the following comment in the C++ Nix source code, +indicating that pointer comparison may be removed in the future. + +```cpp + /* !!! Hack to support some old broken code that relies on pointer + equality tests between sets. (Specifically, builderDefs calls + uniqList on a list of sets.) Will remove this eventually. */ +``` + +Now, I can't speak for the upstream C++ Nix developers, but sure can speculate. +As already pointed out, this feature is currently needed for evaluating nixpkgs. +While its use could realistically be eliminated (only bothersome spot is probably +the `emulator` function, but that should also be doable), removing the feature +would seriously compromise C++ Nix's ability to evaluate historical nixpkgs +revision which is arguably a strength of the system. + +Another indication that it is likely here to stay is that it has already +[outlived builderDefs][], even though +it was (apparently) reintroduced just for this use case. More research into +the history of this feature would still be prudent, especially the reason for +its original introduction (maybe performance?). + +[puck-issue]: https://github.com/NixOS/nix/issues/3371 +[eqValues-pointer-eq]: https://github.com/NixOS/nix/blob/05d0892443bbe92a6b6a1ee7b1d37ea05782d918/src/libexpr/eval.cc#L2342-L2350 +[eqValues-function-eq]: https://github.com/NixOS/nix/blob/05d0892443bbe92a6b6a1ee7b1d37ea05782d918/src/libexpr/eval.cc#L2405-L2407 +[ExprOpEq]: https://github.com/NixOS/nix/blob/05d0892443bbe92a6b6a1ee7b1d37ea05782d918/src/libexpr/eval.cc#L1856-L1861 +[outlived builderDefs]: https://github.com/NixOS/nixpkgs/issues/4210 +[CompareValues]: https://github.com/NixOS/nix/blob/master/src/libexpr/primops.cc#L536-L574 diff --git a/tvix/eval/.skip-subtree b/tvix/eval/.skip-subtree new file mode 100644 index 000000000000..05f9fc116f11 --- /dev/null +++ b/tvix/eval/.skip-subtree @@ -0,0 +1,2 @@ +Do not traverse further, readTree will encounter Nix language tests +and such and fail. 
diff --git a/tvix/eval/Cargo.toml b/tvix/eval/Cargo.toml new file mode 100644 index 000000000000..99c4fc1a1106 --- /dev/null +++ b/tvix/eval/Cargo.toml @@ -0,0 +1,60 @@ +[package] +name = "tvix-eval" +version = "0.1.0" +edition = "2021" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[lib] +name = "tvix_eval" + +[dependencies] +builtin-macros = { path = "./builtin-macros", package = "tvix-eval-builtin-macros" } +codemap = "0.1.3" +codemap-diagnostic = "0.1.1" +dirs = "4.0.0" +genawaiter = { version = "0.99.1", default_features = false } +imbl = { version = "2.0", features = [ "serde" ] } +lazy_static = "1.4.0" +lexical-core = { version = "0.8.5", features = ["format", "parse-floats"] } +path-clean = "0.1" +proptest = { version = "1.0.0", default_features = false, features = ["std", "alloc", "break-dead-code", "tempfile"], optional = true } +regex = "1.6.0" +rnix = "0.11.0" +rowan = "*" # pinned by rnix +serde = { version = "1.0", features = [ "rc", "derive" ] } +serde_json = "1.0" +smol_str = "0.2.0" +tabwriter = "1.2" +test-strategy = { version = "0.2.1", optional = true } +toml = "0.6.0" +xml-rs = "0.8.4" + +[dev-dependencies] +criterion = "0.4" +pretty_assertions = "1.2.1" +itertools = "0.10.3" +tempdir = "0.3.7" + +[dev-dependencies.test-generator] +# This fork of test-generator adds support for cargo workspaces, see +# also https://github.com/frehberg/test-generator/pull/14 +git = "https://github.com/JamesGuthrie/test-generator.git" +rev = "82e799979980962aec1aa324ec6e0e4cad781f41" + +[features] +default = ["impure", "arbitrary", "nix_tests"] + +# Enables running the Nix language test suite from the original C++ +# Nix implementation (at version 2.3) against Tvix. +nix_tests = [] + +# Enables operations in the VM which depend on the ability to perform I/O +impure = [] + +# Enables Arbitrary impls for internal types (required to run tests) +arbitrary = ["proptest", "test-strategy", "imbl/proptest"] + +[[bench]] +name = "eval" +harness = false diff --git a/tvix/eval/README.md b/tvix/eval/README.md new file mode 100644 index 000000000000..04bc6c3eb82b --- /dev/null +++ b/tvix/eval/README.md @@ -0,0 +1,58 @@ +Tvix Evaluator +============== + +This project implements an interpreter for the Nix programming +language. You can experiment with an online version of the evaluator: +[tvixbolt][]. + +The interpreter aims to be compatible with `nixpkgs`, on the +foundation of Nix 2.3. + +**Important note:** The evaluator is not yet feature-complete, and +while the core mechanisms (compiler, runtime, ...) have stabilised +somewhat, a lot of components are still changing rapidly. + +Please contact [TVL](https://tvl.fyi) with any questions you might +have. + +## Building tvix-eval + +Please check the `README.md` one level up for instructions on how to build this. + +The evaluator itself can also be built with standard Rust tooling (i.e. `cargo +build`). + +If you would like to clone **only** the evaluator and build it +directly with Rust tooling, you can do: + +```bash +git clone https://code.tvl.fyi/depot.git:/tvix/eval.git tvix-eval + +cd tvix-eval && cargo build +``` + +## Nix test suite + +C++ Nix implements a language test suite in the form of Nix source +code files, and their expected output. The coverage of this test suite +is not complete, but we intend to be compatible with it. + +We have ported the test suite to Tvix, but do not run it by default as +we are not yet compatible with it. 
+ +You can run the test suite by enabling the `nix_tests` feature in +Cargo: + + cargo test --features nix_tests + +## rnix-parser + +Tvix is written in memory of jD91mZM2, the author of [rnix-parser][] +who sadly [passed away][rip]. + +Tvix makes heavy use of rnix-parser in its bytecode compiler. The +parser is now maintained by Nix community members. + +[rnix-parser]: https://github.com/nix-community/rnix-parser +[rip]: https://www.redox-os.org/news/open-source-mental-health/ +[tvixbolt]: https://tazj.in/blobs/tvixbolt/ diff --git a/tvix/eval/benches/eval.rs b/tvix/eval/benches/eval.rs new file mode 100644 index 000000000000..f85e3585f017 --- /dev/null +++ b/tvix/eval/benches/eval.rs @@ -0,0 +1,39 @@ +use criterion::{black_box, criterion_group, criterion_main, Criterion}; +use itertools::Itertools; + +fn interpret(code: &str) { + tvix_eval::Evaluation::new(code, None).evaluate(); +} + +fn eval_literals(c: &mut Criterion) { + c.bench_function("int", |b| { + b.iter(|| { + interpret("42"); + black_box(()) + }) + }); +} + +fn eval_merge_attrs(c: &mut Criterion) { + c.bench_function("merge small attrs", |b| { + b.iter(|| { + interpret("{ a = 1; b = 2; } // { c = 3; }"); + black_box(()) + }) + }); + + c.bench_function("merge large attrs with small attrs", |b| { + let large_attrs = format!( + "{{{}}}", + (0..10000).map(|n| format!("a{n} = {n};")).join(" ") + ); + let expr = format!("{large_attrs} // {{ c = 3; }}"); + b.iter(move || { + interpret(&expr); + black_box(()) + }) + }); +} + +criterion_group!(benches, eval_literals, eval_merge_attrs); +criterion_main!(benches); diff --git a/tvix/eval/build.rs b/tvix/eval/build.rs new file mode 100644 index 000000000000..a9c9a78b060d --- /dev/null +++ b/tvix/eval/build.rs @@ -0,0 +1,9 @@ +use std::env; + +fn main() { + println!( + "cargo:rustc-env=TVIX_CURRENT_SYSTEM={}", + &env::var("TARGET").unwrap() + ); + println!("cargo:rerun-if-changed-env=TARGET") +} diff --git a/tvix/eval/builtin-macros/.gitignore b/tvix/eval/builtin-macros/.gitignore new file mode 100644 index 000000000000..eb5a316cbd19 --- /dev/null +++ b/tvix/eval/builtin-macros/.gitignore @@ -0,0 +1 @@ +target diff --git a/tvix/eval/builtin-macros/Cargo.toml b/tvix/eval/builtin-macros/Cargo.toml new file mode 100644 index 000000000000..3a35ea12a0c0 --- /dev/null +++ b/tvix/eval/builtin-macros/Cargo.toml @@ -0,0 +1,16 @@ +[package] +name = "tvix-eval-builtin-macros" +version = "0.0.1" +authors = [ "Griffin Smith <root@gws.fyi>" ] +edition = "2021" + +[dependencies] +syn = { version = "1.0.57", features = ["full", "parsing", "printing", "visit", "visit-mut", "extra-traits"] } +quote = "1.0.8" +proc-macro2 = "1" + +[lib] +proc-macro = true + +[dev-dependencies] +tvix-eval = { path = "../" } diff --git a/tvix/eval/builtin-macros/src/lib.rs b/tvix/eval/builtin-macros/src/lib.rs new file mode 100644 index 000000000000..dfd0948c7d9f --- /dev/null +++ b/tvix/eval/builtin-macros/src/lib.rs @@ -0,0 +1,334 @@ +extern crate proc_macro; + +use proc_macro::TokenStream; +use proc_macro2::Span; +use quote::{quote, quote_spanned, ToTokens}; +use syn::parse::Parse; +use syn::spanned::Spanned; +use syn::{ + parse2, parse_macro_input, parse_quote, parse_quote_spanned, Attribute, FnArg, Ident, Item, + ItemMod, LitStr, Meta, Pat, PatIdent, PatType, Token, Type, +}; + +/// Description of a single argument passed to a builtin +struct BuiltinArgument { + /// The name of the argument, to be used in docstrings and error messages + name: Ident, + + /// Type of the argument. 
+ ty: Box<Type>, + + /// Whether the argument should be forced before the underlying builtin + /// function is called. + strict: bool, + + /// Span at which the argument was defined. + span: Span, +} + +fn extract_docstring(attrs: &[Attribute]) -> Option<String> { + // Rust docstrings are transparently written pre-macro expansion into an attribute that looks + // like: + // + // #[doc = "docstring here"] + // + // Multi-line docstrings yield multiple attributes in order, which we assemble into a single + // string below. + + #[allow(dead_code)] + #[derive(Debug)] + struct Docstring { + eq: Token![=], + doc: LitStr, + } + + impl Parse for Docstring { + fn parse(input: syn::parse::ParseStream) -> syn::Result<Self> { + Ok(Self { + eq: input.parse()?, + doc: input.parse()?, + }) + } + } + + attrs + .iter() + .filter(|attr| attr.path.get_ident().into_iter().any(|id| id == "doc")) + .filter_map(|attr| parse2::<Docstring>(attr.tokens.clone()).ok()) + .map(|docstring| docstring.doc.value()) + .reduce(|mut fst, snd| { + if snd.is_empty() { + // An empty string represents a spacing newline that was added in the + // original doc comment. + fst.push_str("\n\n"); + } else { + fst.push_str(&snd); + } + + fst + }) +} + +/// Parse arguments to the `builtins` macro itself, such as `#[builtins(state = Rc<State>)]`. +fn parse_module_args(args: TokenStream) -> Option<Type> { + if args.is_empty() { + return None; + } + + let meta: Meta = syn::parse(args).expect("could not parse arguments to `builtins`-attribute"); + let name_value = match meta { + Meta::NameValue(nv) => nv, + _ => panic!("arguments to `builtins`-attribute must be of the form `name = value`"), + }; + + if name_value.path.get_ident().unwrap().to_string() != "state" { + return None; + } + + if let syn::Lit::Str(type_name) = name_value.lit { + let state_type: Type = + syn::parse_str(&type_name.value()).expect("failed to parse builtins state type"); + return Some(state_type); + } + + panic!("state attribute must be a quoted Rust type"); +} + +/// Mark the annotated module as a module for defining Nix builtins. +/// +/// An optional type definition may be specified as an argument (e.g. `#[builtins(Rc<State>)]`), +/// which will add a parameter to the `builtins` function of that type which is passed to each +/// builtin upon instantiation. Using this, builtins that close over some external state can be +/// written. +/// +/// The type of each function is rewritten to receive a `Vec<Value>`, containing each `Value` +/// argument that the function receives. The body of functions is accordingly rewritten to "unwrap" +/// values from this vector and bind them to the correct names, so unless a static error occurs this +/// transformation is mostly invisible to users of the macro. +/// +/// A function `fn builtins() -> Vec<Builtin>` will be defined within the annotated module, +/// returning a list of [`tvix_eval::Builtin`] for each function annotated with the `#[builtin]` +/// attribute within the module. If a `state` type is specified, the `builtins` function will take a +/// value of that type. +/// +/// Each invocation of the `#[builtin]` annotation within the module should be passed a string +/// literal for the name of the builtin. 
+/// +/// # Examples +/// ```ignore +/// # use tvix_eval_builtin_macros::builtins; +/// # mod value { +/// # pub use tvix_eval::Builtin; +/// # } +/// +/// #[builtins] +/// mod builtins { +/// use tvix_eval::{GenCo, ErrorKind, Value}; +/// +/// #[builtin("identity")] +/// pub async fn builtin_identity(co: GenCo, x: Value) -> Result<Value, ErrorKind> { +/// Ok(x) +/// } +/// +/// // Builtins can request their argument not be forced before being called by annotating the +/// // argument with the `#[lazy]` attribute +/// +/// #[builtin("tryEval")] +/// pub async fn builtin_try_eval(co: GenCo, #[lazy] x: Value) -> Result<Value, ErrorKind> { +/// todo!() +/// } +/// } +/// ``` +#[proc_macro_attribute] +pub fn builtins(args: TokenStream, item: TokenStream) -> TokenStream { + let mut module = parse_macro_input!(item as ItemMod); + + // parse the optional state type, which users might want to pass to builtins + let state_type = parse_module_args(args); + + let (_, items) = match &mut module.content { + Some(content) => content, + None => { + return (quote_spanned!(module.span() => + compile_error!("Builtin modules must be defined in-line") + )) + .into(); + } + }; + + let mut builtins = vec![]; + for item in items.iter_mut() { + if let Item::Fn(f) = item { + if let Some(builtin_attr_pos) = f + .attrs + .iter() + .position(|attr| attr.path.get_ident().iter().any(|id| *id == "builtin")) + { + let builtin_attr = f.attrs.remove(builtin_attr_pos); + let name: LitStr = match builtin_attr.parse_args() { + Ok(args) => args, + Err(err) => return err.into_compile_error().into(), + }; + + if f.sig.inputs.len() <= 1 { + return (quote_spanned!( + f.sig.inputs.span() => + compile_error!("Builtin functions must take at least two arguments") + )) + .into(); + } + + // Inspect the first argument to determine if this function is + // taking the state parameter. + // TODO(tazjin): add a test in //tvix/eval that covers this + let mut captures_state = false; + if let FnArg::Typed(PatType { pat, .. }) = &f.sig.inputs[0] { + if let Pat::Ident(PatIdent { ident, .. }) = pat.as_ref() { + if ident.to_string() == "state" { + if state_type.is_none() { + panic!("builtin captures a `state` argument, but no state type was defined"); + } + + captures_state = true; + } + } + } + + let mut rewritten_args = std::mem::take(&mut f.sig.inputs) + .into_iter() + .collect::<Vec<_>>(); + + // Split out the value arguments from the static arguments. + let split_idx = if captures_state { 2 } else { 1 }; + let value_args = rewritten_args.split_off(split_idx); + + let builtin_arguments = value_args + .into_iter() + .map(|arg| { + let span = arg.span(); + let mut strict = true; + let (name, ty) = match arg { + FnArg::Receiver(_) => { + return Err(quote_spanned!(span => { + compile_error!("unexpected receiver argument in builtin") + })) + } + FnArg::Typed(PatType { + mut attrs, pat, ty, .. + }) => { + attrs.retain(|attr| { + attr.path.get_ident().into_iter().any(|id| { + if id == "lazy" { + strict = false; + false + } else { + true + } + }) + }); + match pat.as_ref() { + Pat::Ident(PatIdent { ident, .. }) => { + (ident.clone(), ty.clone()) + } + _ => panic!("ignored value parameters must be named, e.g. 
`_x` and not just `_`"), + } + } + }; + + Ok(BuiltinArgument { + strict, + span, + name, + ty, + }) + }) + .collect::<Result<Vec<BuiltinArgument>, _>>(); + + let builtin_arguments = match builtin_arguments { + Err(err) => return err.into(), + + // reverse argument order, as they are popped from the stack + // slice in opposite order + Ok(args) => args, + }; + + // Rewrite the argument to the actual function to take a + // `Vec<Value>`, which is then destructured into the + // user-defined values in the function header. + let sig_span = f.sig.span(); + rewritten_args.push(parse_quote_spanned!(sig_span=> mut values: Vec<Value>)); + f.sig.inputs = rewritten_args.into_iter().collect(); + + // Rewrite the body of the function to do said argument forcing. + // + // This is done by creating a new block for each of the + // arguments that evaluates it, and wraps the inner block. + for arg in &builtin_arguments { + let block = &f.block; + let ty = &arg.ty; + let ident = &arg.name; + + if arg.strict { + f.block = Box::new(parse_quote_spanned! {arg.span=> { + let #ident: #ty = generators::request_force(&co, values.pop() + .expect("Tvix bug: builtin called with incorrect number of arguments")).await; + + #block + }}); + } else { + f.block = Box::new(parse_quote_spanned! {arg.span=> { + let #ident: #ty = values.pop() + .expect("Tvix bug: builtin called with incorrect number of arguments"); + + #block + }}) + } + } + + let fn_name = f.sig.ident.clone(); + let arg_count = builtin_arguments.len(); + let docstring = match extract_docstring(&f.attrs) { + Some(docs) => quote!(Some(#docs)), + None => quote!(None), + }; + + if captures_state { + builtins.push(quote_spanned! { builtin_attr.span() => { + let inner_state = state.clone(); + crate::Builtin::new( + #name, + #docstring, + #arg_count, + move |values| Gen::new(|co| generators::pin_generator(#fn_name(inner_state.clone(), co, values))), + ) + }}); + } else { + builtins.push(quote_spanned! { builtin_attr.span() => { + crate::Builtin::new( + #name, + #docstring, + #arg_count, + |values| Gen::new(|co| generators::pin_generator(#fn_name(co, values))), + ) + }}); + } + } + } + } + + if let Some(state_type) = state_type { + items.push(parse_quote! { + pub fn builtins(state: #state_type) -> Vec<(&'static str, Value)> { + vec![#(#builtins),*].into_iter().map(|b| (b.name(), Value::Builtin(b))).collect() + } + }); + } else { + items.push(parse_quote! { + pub fn builtins() -> Vec<(&'static str, Value)> { + vec![#(#builtins),*].into_iter().map(|b| (b.name(), Value::Builtin(b))).collect() + } + }); + } + + module.into_token_stream().into() +} diff --git a/tvix/eval/builtin-macros/tests/tests.rs b/tvix/eval/builtin-macros/tests/tests.rs new file mode 100644 index 000000000000..735ff4672007 --- /dev/null +++ b/tvix/eval/builtin-macros/tests/tests.rs @@ -0,0 +1,45 @@ +pub use tvix_eval::{Builtin, Value}; +use tvix_eval_builtin_macros::builtins; + +#[builtins] +mod builtins { + use tvix_eval::generators::{self, Gen, GenCo}; + use tvix_eval::{ErrorKind, Value}; + + /// Test docstring. + /// + /// It has multiple lines! 
+ #[builtin("identity")] + pub async fn builtin_identity(co: GenCo, x: Value) -> Result<Value, ErrorKind> { + Ok(todo!()) + } + + #[builtin("tryEval")] + pub async fn builtin_try_eval(co: GenCo, #[lazy] _x: Value) -> Result<Value, ErrorKind> { + todo!() + } +} + +#[test] +fn builtins() { + let builtins = builtins::builtins(); + assert_eq!(builtins.len(), 2); + + let (_, identity) = builtins + .iter() + .find(|(name, _)| *name == "identity") + .unwrap(); + + match identity { + Value::Builtin(identity) => assert_eq!( + identity.documentation(), + Some( + r#" Test docstring. + + It has multiple lines!"# + ) + ), + + _ => panic!("builtin was not a builtin"), + } +} diff --git a/tvix/eval/default.nix b/tvix/eval/default.nix new file mode 100644 index 000000000000..91661291f7b6 --- /dev/null +++ b/tvix/eval/default.nix @@ -0,0 +1,9 @@ +# TODO: find a way to build the benchmarks via crate2nix +{ depot, pkgs, ... }: + +depot.tvix.crates.workspaceMembers.tvix-eval.build.override { + runTests = true; + + # Make C++ Nix available, to compare eval results against. + testInputs = [ pkgs.nix ]; +} diff --git a/tvix/eval/docs/abandoned/thread-local-vm.md b/tvix/eval/docs/abandoned/thread-local-vm.md new file mode 100644 index 000000000000..c6a2d5e07e5c --- /dev/null +++ b/tvix/eval/docs/abandoned/thread-local-vm.md @@ -0,0 +1,233 @@ +# We can't have nice things because IFD + +The thread-local VM work below was ultimately not merged because it +was decided that it would be harmful for `tvix::eval::Value` to +implement `Eq`, `Hash`, or any of the other `std` traits. + +Implementing `std` traits on `Value` was deemed harmful because IFD +can cause arbitrary amounts of compilation to occur, including +network transactions with builders. Obviously it would be +unexpected and error-prone to have a `PartialEq::eq()` which does +something like this. This problem does not manifest within the +"nixpkgs compatibility only" scope, or in any undeprecated language +feature other than IFD. Although IFD is outside the "nixpkgs +compatibility scope", it [has been added to the TVL compatibility +scope](https://cl.tvl.fyi/c/depot/+/7193/comment/3418997b_0dbd0b65/). + +This was the sole reason for not merging. + +The explanation below may be useful in case future circumstances +affect the relevance of the reasoning above. + +The implementation can be found in these CLs: + +- [refactor(tvix/eval): remove lifetime parameter from VM<'o>](https://cl.tvl.fyi/c/depot/+/7194) +- [feat(tvix/eval): [FOUNDLING] thread-local VM](https://cl.tvl.fyi/c/depot/+/7195) +- [feat(tvix/eval): [FOUNDLING] VM::vm_xxx convenience methods](https://cl.tvl.fyi/c/depot/+/7196) +- [refactor(tvix/eval): [FOUNDLING]: drop explicit `&mut vm` parameter](https://cl.tvl.fyi/c/depot/+/7197) + +# Thread-local storage for tvix::eval::vm::VM + +## The problem + +`Value::force()` takes a `&mut VM` argument, since forcing a value +requires executing opcodes. This means that `Value::nix_eq()` too +must take a `&mut VM`, since any sensible definition of equality +will have to force thunks. + +Unfortunately Rust's `PartialEq::eq()` function does not accept any +additional arguments like this, so `Value` cannot implement +`PartialEq`. Worse, structs which *contain* `Value`s can't +implement `PartialEq` either. This means `Value`, and anything +containing it, cannot be the key for a `BTreeMap` or `HashMap`. We +can't even insert `Value`s into a `HashSet`! + +There are other situations like this that don't involve `PartialEq`, +but it's the most glaring one. 
The main problem is that you need a +`VM` in order to force thunks, and thunks can be anywhere in a +`Value`. + +## Solving the problem with thread-locals + +We could avoid threading the `&mut VM` through the entire codebase +by making it a thread-local. + +To do this without a performance hit, we need to use LLVM +thread-locals, which are the same cost as references to `static`s +but load relative to +[`llvm.threadlocal.address`][threadlocal-intrinsic] instead of +relative to the data segment. Unfortunately `#[thread_local]` [is +unstable][thread-local-unstable] and [unsafe in +general][thread-local-unsafe] for most of the cases where we would +want to use it. There is one [exception][tls-const-init], however: +if a `!thread_local()` has a `const` initializer, the compiler will +insert a `#[thread_local]`; this special case is both safe and +stable. + +The difficult decision is what the type of the thread-local should +be. Since you can't get a mutable reference to a `thread_local!()` +it will have to be some interior-mutability-bestowing wrapper around +our current `struct VM`. Here are the choices: + +### `RefCell<VM>` + +This is the obvious first choice, since it lets you borrow a +`RefMut<Target=VM>`. The problem here is that we want to keep the +codebase written such that all the functions in `impl VM` still take +a `&mut self`. This means that there will be an active mutable +borrow for the duration of `VM::call_builtin()`. So if we implement +`PartialEq` by having `eq()` attempt a second mutable borrow from +the thread-local storage, it will fail since there is already an +active borrow. + +The problem here is that you can't "unborrow" a `RefMut` except by +dropping it. There's no way around this. + +#### Problem: Uglification + +The only solution here is to rewrite all the functions in `impl VM` +so they don't take any kind of `self` argument, and then have them +do a short-lived `.borrow_mut()` from the thread-local `RefCell` +*separately, each time* they want to modify one of the fields of +`VM` (currently `frames`, `stack`, `with_stack`, `warnings`). This +means that if you had a code sequence like this: + +``` +impl VM { + fn foo(&mut self, ...) { + ... + self.frame().ip += 1; + self.some_other_method(); + self.frame().ip += 1; +``` + +You would need to add *two separate `borrow_mut()`s*, one for each +of the `self.frame().ip+=1` statements. You can't just do one big +`borrow_mut()` because `some_other_method()` will call +`borrow_mut()` and panic. + +#### Problem: Performance + +The `RefCell<VM>` approach also has a fairly huge performance hit, +because every single modification to any part of `VM` will require a +reference count increment/decrement, and a conditional branch based +on the check (which will never fail) that the `RefCell` isn't +already mutably borrowed. It will also impede a lot of rustc's +optimizations. + +### `Cell<VM>` + +This is a non-starter because it means that in order to mutate any +field of `VM`, you have to move the entire `struct VM` out of the +`Cell`, mutate it, and move it back in. + +### `Cell<Box<VM>>` + +Now we're getting warmer. Here, we can move the `Box<VM>` out of +the cell with a single pointer-sized memory access. + +We don't want to do the "uglification" described in the previous +section. 
We are very fortunate that, sometime in mid-2019, the Rust +dieties [decreed by fiat][fiat-decree] that `&Cell<T>` and `&mut T` +are bit-for-bit identical, and even gave us mortals safe wrappers +[`from_mut()`][from_mut] and [`get_mut()`][get_mut] around +`mem::transmute()`. + +So now, when a `VM` method (which takes `&mut self`) calls out to +some external code (like a builtin), instead of passing the `&mut +self` to the external code it can call `Cell::from_mut(&mut self)`, +and then `Cell::swap()` that into the thread-local storage cell for +the duration of the external code. After the external code returns, +it can `Cell::swap()` it back. This whole dance gets wrapped in a +lexical block, and the borrow checker sees that the `&Cell<Box<VM>>` +returned by `Cell::from_mut()` lives only until the end of the +lexical block, *so we get the `&mut self` back after the close-brace +for that block*. NLL FTW. This sounds like a lot of work, but it +should compile down to two pointer-sized loads and two pointer-sized +stores, and it is incurred basically only for `OpBuiltin`. + +This all works, with only two issues: + +1. `vm.rs` needs to be very careful to do the thread-local cell swap + dance before calling anything that might call `PartialEq::eq()` + (or any other method that expects to be able to pull the `VM` out + of thread-local storage). There is no compile-time check that we + did the dance in all the right places. If we forget to do the + dance somewhere we'll get a runtime panic from `Option::expect()` + (see next section). + +2. Since we need to call `Cell::from_mut()` on a `Box<VM>` rather + than a bare `VM`, we still need to rewrite all of `vm.rs` so that + every function takes a `&mut Box<VM>` instead of a `&mut self`. + This creates a huge amount of "noise" in the code. + +Fortunately, it turns out that nearly all the "noise" that arises +from the second point can be eliminated by taking advantage of +[deref coercions][deref-coercions]! This was the last "shoe to +drop". + +There is still the issue of having to be careful about calls from +`vm.rs` to things outside that file, but it's manageable. + +### `Cell<Option<Box<VM>>>` + +In order to get the "safe and stable `#[thread_local]`" +[exception][tls-const-init] we need a `const` initializer, which +means we need to be able to put something into the `Cell` that isn't +a `VM`. So the type needs to be `Cell<Option<Box<VM>>>`. + +Recall that you can't turn an `Option<&T>` into an `&Option<T>`. +The latter type has the "is this a `Some` or `None`" bit immediately +adjacent to the bits representing `T`. So if I hand you a `t:&T` +and you wrap it as `Some(t)`, those bits aren't adjacent in memory. +This means that all the VM methods need to operate on an +`Option<Box<VM>>` -- we can't just wrap a `Some()` around `&mut +self` "at the last minute" before inserting it into the thread-local +storage cell. Fortunately deref coercions save the day here too -- +the coercion is inferred through both layers (`Box` and `Option`) of +wrapper, so there is no additional noise in the code. + +Note that Rust is clever and can find some sequence of bits that +aren't a valid `T`, so `sizeof(Option<T>)==sizeof(T)`. And in fact, +`Box<T>` is one of these cases (and this is guaranteed). So the +`Option` has no overhead. + +# Closing thoughts, language-level support + +This would have been easier with language-level support. 
+ +## What wouldn't help + +Although it [it was decreed][fiat-decree] that `Cell<T>` and `&mut +T` are interchangeable, a `LocalKey<Cell<T>>` isn't quite the same +thing as a `Cell<T>`, so it wouldn't be safe for the standard +library to contain something like this: + +``` +impl<T> LocalKey<Cell<T>> { + fn get_mut(&self) -> &mut T { + unsafe { + // ... mem::transmute() voodoo goes here ... +``` + +The problem here is that you can call `LocalKey<Cell<T>>::get_mut()` twice and +end up with two `&mut T`s that point to the same thing (mutable aliasing) which +results in undefined behavior. + +## What would help + +The ideal solution is for Rust to let you call arbitrary methods +`T::foo(&mut self...)` on a `LocalKey<Cell<T>>`. This way you can +have one (and only one) `&mut T` at any syntactical point in your +program -- the `&mut self`. + + +[tls-const-init]: https://github.com/rust-lang/rust/pull/90774 +[thread-local-unstable]: https://github.com/rust-lang/rust/issues/29594 +[thread-local-unsafe-generally]: https://github.com/rust-lang/rust/issues/54366 +[fiat-decree]: https://github.com/rust-lang/rust/issues/43038 +[from_mut]: https://doc.rust-lang.org/stable/std/cell/struct.Cell.html#method.from_mut +[get_mut]: https://doc.rust-lang.org/stable/std/cell/struct.Cell.html#method.get_mut +[thread-local-unsafe]: [https://github.com/rust-lang/rust/issues/54366] +[deref-coercions]: https://doc.rust-lang.org/book/ch15-02-deref.html#implicit-deref-coercions-with-functions-and-methods +[threadlocal-intrinsic]: https://llvm.org/docs/LangRef.html#llvm-threadlocal-address-intrinsic diff --git a/tvix/eval/docs/build-references.md b/tvix/eval/docs/build-references.md new file mode 100644 index 000000000000..788b25c2be6c --- /dev/null +++ b/tvix/eval/docs/build-references.md @@ -0,0 +1,175 @@ +Build references in derivations +=============================== + +This document describes how build references are calculated in Tvix. Build +references are used to determine which store paths should be available to a +builder during the execution of a build (i.e. the full build closure of a +derivation). + +## String contexts in C++ Nix + +In C++ Nix, each string value in the evaluator carries an optional so-called +"string context". + +These contexts are themselves a list of strings that take one of the following +formats: + +1. `!<output_name>!<drv_path>` + + This format describes a build reference to a specific output of a derivation. + +2. `=<drv_path>` + + This format is used for a special case where a derivation attribute directly + refers to a derivation path (e.g. by accessing `.drvPath` on a derivation). + + Note: In C++ Nix this case is quite special and actually requires a + store-database query during evaluation. + +3. `<path>` - a non-descript store path input, usually a plain source file (e.g. + from something like `src = ./.` or `src = ./foo.txt`). + + In the case of `unsafeDiscardOutputDependency` this is used to pass a raw + derivation file, but *not* pull in its outputs. + +Lets introduce names for these (in the same order) to make them easier to +reference below: + +```rust +enum BuildReference { + /// !<output_name>!<drv_path> + SingleOutput(OutputName, DrvPath), + + /// =<drv_path> + DrvClosure(DrvPath), + + /// <path> + Path(StorePath), +} +``` + +String contexts are, broadly speaking, created whenever a string is the result +of a computation (e.g. string interpolation) that used a *computed* path or +derivation in any way. 
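As a sketch of how such a context can be observed (using `builtins.getContext`, covered below; `pkgs.hello` and the exact shape of the result are placeholders for illustration, not a statement about Tvix behaviour), interpolating a derivation's output path yields a string carrying a `SingleOutput`-style reference:

```nix
# Placeholder example: any derivation works in place of `pkgs.hello`, and the
# .drv path in the comment is illustrative, not a real store hash.
let
  pkgs = import <nixpkgs> { };
  greeting = "${pkgs.hello}/bin/hello";  # interpolation of a computed output path
in
builtins.getContext greeting
# => { "/nix/store/…-hello-….drv" = { outputs = [ "out" ]; }; }
```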
+ +Note: This explicitly does *not* include simply writing a literal string +containing a store path (whether valid or not). That is only permitted through +the `storePath` builtin. + +## Derivation inputs + +Based on the data above, the fields `inputDrvs` and `inputSrcs` of derivations +are populated in `builtins.derivationStrict` (the function which +`builtins.derivation`, which isn't actually a builtin, wraps). + +`inputDrvs` is represented by a map of derivation paths to the set of their +outputs that were referenced by the context. + +TODO: What happens if the set is empty? Somebody claimed this means all outputs. + +`inputSrcs` is represented by a set of paths. + +These are populated by the above references as follows: + +* `SingleOutput` entries are merged into `inputDrvs` +* `Path` entries are inserted into `inputSrcs` +* `DrvClosure` leads to a special store computation (`computeFSClosure`), which + finds all paths referenced by the derivation and then inserts all of them into + the fields as above (derivations with _all_ their outputs) + +This is then serialised in the derivation and passed down the pipe. + +## Builtins interfacing with contexts + +C++ Nix has several builtins that interface directly with string contexts: + +* `unsafeDiscardStringContext`: throws away a string's string context (if + present) +* `hasContext`: returns `true`/`false` depending on whether the string has + context +* `unsafeDiscardOutputDependency`: drops dependencies on the *outputs* of a + `.drv` in the context, passing only the literal `.drv` itself + + Note: This is only used for special test-cases in nixpkgs, and deprecated Nix + commands like `nix-push`. +* `getContext`: returns the string context in serialised form as a Nix attribute + set +* `appendContext`: adds a given string context to the string in the same format + as returned by `getContext` + +## Placeholders + +C++ Nix has `builtins.placeholder`, which given the name of an output (e.g. +`out`) creates a hashed string representation of that output name. If that +string is used anywhere in input attributes, the builder will replace it with +the actual name of the corresponding output of the current derivation. + +C++ Nix does not use contexts for this, it blindly creates a rewrite map of +these placeholder strings to the names of all outputs, and runs the output +replacement logic on all environment variables it creates, attribute files it +passes etc. + +## Tvix & string contexts + +Tvix does not track string contexts in its evaluator at all. Instead we are +investigating implementing a system which allows us to drop string contexts in +favour of reference scanning derivation attributes. + +This means that instead of maintaining and passing around a string context data +structure in eval, we maintain a data structure of *known paths* from the same +evaluation elsewhere in Tvix, and scan each derivation attribute against this +set of known paths when instantiating derivations. + +Until proven otherwise, we take the stance that the system of string contexts as +implemented in C++ Nix is likely an implementation detail that should not be +leaking to the language surface as it does now. + +### Tracking "known paths" + +Every time a Tvix evaluation does something that causes a store interaction, a +"known path" is created. On the language surface, this is the result of one of: + +1. Path literals (e.g. `src = ./.`). +2. Calls to `builtins.derivationStrict` yielding a derivation and its output + paths. +3. Calls to `builtins.path`. 
+ +Whenever one of these occurs, some metadata that persists for the duration of +one evaluation should be created in Nix. This metadata needs to be available in +`builtins.derivationStrict`, and should be able to respond to these queries: + +1. What is the set of all known paths? (used for e.g. instantiating an + Aho-Corasick type string searcher) +2. What is the _type_ of a path? (derivation path, derivation output, source + file) +3. What are the outputs of a derivation? +4. What is the derivation of an output? + +These queries will need to be asked of the metadata when populating the +derivation fields. + +Note: Depending on how we implement `builtins.placeholder`, it might be useful +to track created placeholders in this metadata, too. + +### Context builtins + +Context-reading builtins can be implemented in Tvix by adding `hasContext` and +`getContext` with the appropriate reference-scanning logic. However, we should +evaluate how these are used in nixpkgs and whether their uses can be removed. + +Context-mutating builtins can be implemented by tracking their effects in the +value representation of Tvix, however we should consider not doing this at all. + +`unsafeDiscardOutputDependency` should probably never be used and we should warn +or error on it. + +`unsafeDiscardStringContext` is often used as a workaround for avoiding IFD in +inconvenient places (e.g. in the TVL depot pipeline generation). This is +unnecessary in Tvix. We should evaluate which other uses exist, and act on them +appropriately. + +The initial danger with diverging here is that we might cause derivation hash +discrepancies between Tvix and C++ Nix, which can make initial comparisons of +derivations generated by the two systems difficult. If this occurs we need to +discuss how to approach it, but initially we will implement the mutating +builtins as no-ops. diff --git a/tvix/eval/docs/builtins.md b/tvix/eval/docs/builtins.md new file mode 100644 index 000000000000..00af50484903 --- /dev/null +++ b/tvix/eval/docs/builtins.md @@ -0,0 +1,138 @@ +Nix builtins +============ + +Nix has a lot of built-in functions, some of which are accessible in +the global scope, and some of which are only accessible through the +global `builtins` attribute set. + +This document is an attempt to track all of these builtins, but +without documenting their functionality. + +See also https://nixos.org/manual/nix/stable/expressions/builtins.html + +The `impl` column indicates implementation status in tvix: +- implemented: "" (empty cell) +- not yet implemented, but not blocked: `todo` +- not yet implemented, but blocked by other prerequisites: + - `store`: awaiting eval<->store api(s) + - `context`: awaiting support for string contexts + +| name | global | arity | pure | impl | +|-------------------------------|--------|-------|-------|---------| +| abort | true | 1 | | | +| add | false | 2 | true | | +| addErrorContext | false | ? | | context | +| all | false | 2 | true | | +| any | false | 2 | true | | +| appendContext | false | ? 
| | context | +| attrNames | false | 1 | true | | +| attrValues | false | | true | | +| baseNameOf | true | | | | +| bitAnd | false | | | | +| bitOr | false | | | | +| bitXor | false | | | | +| builtins | true | | | | +| catAttrs | false | | | | +| compareVersions | false | | | | +| concatLists | false | | | | +| concatMap | false | | | | +| concatStringsSep | false | | | | +| currentSystem | false | | | | +| currentTime | false | | false | | +| deepSeq | false | | | | +| derivation | true | | | store | +| derivationStrict | true | | | store | +| dirOf | true | | | | +| div | false | | | | +| elem | false | | | | +| elemAt | false | | | | +| false | true | | | | +| fetchGit | true | | | store | +| fetchMercurial | true | | | store | +| fetchTarball | true | | | store | +| fetchurl | false | | | store | +| filter | false | | | | +| filterSource | false | | | store | +| findFile | false | | false | todo | +| foldl' | false | | | | +| fromJSON | false | | | | +| fromTOML | true | | | | +| functionArgs | false | | | | +| genList | false | | | | +| genericClosure | false | | | todo | +| getAttr | false | | | | +| getContext | false | | | context | +| getEnv | false | | false | | +| hasAttr | false | | | | +| hasContext | false | | | context | +| hashFile | false | | false | todo | +| hashString | false | | | todo | +| head | false | | | | +| import | true | | | | +| intersectAttrs | false | | | | +| isAttrs | false | | | | +| isBool | false | | | | +| isFloat | false | | | | +| isFunction | false | | | | +| isInt | false | | | | +| isList | false | | | | +| isNull | true | | | | +| isPath | false | | | | +| isString | false | | | | +| langVersion | false | | | | +| length | false | | | | +| lessThan | false | | | | +| listToAttrs | false | | | | +| map | true | | | | +| mapAttrs | false | | | | +| match | false | | | | +| mul | false | | | | +| nixPath | false | | | todo | +| nixVersion | false | | | todo | +| null | true | | | | +| parseDrvName | false | | | | +| partition | false | | | | +| path | false | | sometimes | store | +| pathExists | false | | false | | +| placeholder | true | | | context | +| readDir | false | | false | | +| readFile | false | | false | | +| removeAttrs | true | | | | +| replaceStrings | false | | | | +| scopedImport | true | | | | +| seq | false | | | | +| sort | false | | | | +| split | false | | | | +| splitVersion | false | | | | +| storeDir | false | | | store | +| storePath | false | | | store | +| stringLength | false | | | | +| sub | false | | | | +| substring | false | | | | +| tail | false | | | | +| throw | true | | | | +| toFile | false | | | store | +| toJSON | false | | | todo | +| toPath | false | | | | +| toString | true | | | | +| toXML | false | | | todo | +| trace | false | | | | +| true | true | | | | +| tryEval | false | | | | +| typeOf | false | | | | +| unsafeDiscardOutputDependency | false | | | context | +| unsafeDiscardStringContext | false | | | context | +| unsafeGetAttrPos | false | | | todo | +| valueSize | false | | | todo | + +## Added after C++ Nix 2.3 (without Flakes enabled) + +| name | global | arity | pure | impl | +|---------------|--------|-------|-------|-------| +| break | false | 1 | | todo | +| ceil | false | 1 | true | | +| fetchTree | true | 1 | | todo | +| floor | false | 1 | true | | +| groupBy | false | 2 | true | | +| traceVerbose | false | 2 | | todo | +| zipAttrsWith | false | 2 | true | todo | diff --git a/tvix/eval/docs/known-optimisation-potential.md b/tvix/eval/docs/known-optimisation-potential.md new file mode 
100644 index 000000000000..f45f1ee6c48a --- /dev/null +++ b/tvix/eval/docs/known-optimisation-potential.md @@ -0,0 +1,130 @@ +Known Optimisation Potential +============================ + +There are several areas of the Tvix evaluator code base where +potentially large performance gains can be achieved through +optimisations that we are already aware of. + +The shape of most optimisations is that of moving more work into the +compiler to simplify the runtime execution of Nix code. This leads, in +some cases, to drastically higher complexity in both the compiler +itself and in invariants that need to be guaranteed between the +runtime and the compiler. + +For this reason, and because we lack the infrastructure to adequately +track their impact (WIP), we have not yet implemented these +optimisations, but note the most important ones here. + +* Use "open upvalues" [hard] + + Right now, Tvix will immediately close over all upvalues that are + created and clone them into the `Closure::upvalues` array. + + Instead of doing this, we can statically determine most locals that + are closed over *and escape their scope* (similar to how the + `compiler::scope::Scope` struct currently tracks whether locals are + used at all). + + If we implement the machinery to track this, we can implement some + upvalues at runtime by simply sticking stack indices in the upvalue + array and only copy the values where we know that they escape. + +* Avoid `with` value duplication [easy] + + If a `with` makes use of a local identifier in a scope that can not + close before the with (e.g. not across `LambdaCtx` boundaries), we + can avoid the allocation of the phantom value and duplication of the + `NixAttrs` value on the stack. In this case we simply push the stack + index of the known local. + +* Multiple attribute selection [medium] + + An instruction could be introduced that avoids repeatedly pushing an + attribute set to/from the stack if multiple keys are being selected + from it. This occurs, for example, when inheriting from an attribute + set or when binding function formals. + +* Split closure/function representation [easy] + + Functions have fewer fields that need to be populated at runtime and + can directly use the `value::function::Lambda` representation where + possible. + +* Apply `compiler::optimise_select` to other set operations [medium] + + In addition to selects, statically known attribute resolution could + also be used for things like `?` or `with`. The latter might be a + little more complicated but is worth investigating. + +* Inline fully applied builtins with equivalent operators [medium] + + Some `builtins` have equivalent operators, e.g. `builtins.sub` + corresponds to the `-` operator, `builtins.hasAttr` to the `?` + operator etc. These operators additionally compile to a primitive + VM opcode, so they should be just as cheap (if not cheaper) as + a builtin application. + + In case the compiler encounters a fully applied builtin (i.e. + no currying is occurring) and the `builtins` global is unshadowed, + it could compile the equivalent operator bytecode instead: For + example, `builtins.sub 20 22` would be compiled as `20 - 22`. + This would ensure that equivalent `builtins` can also benefit + from special optimisations we may implement for certain operators + (in the absence of currying). E.g. we could optimise access + to the `builtins` attribute set which a call to + `builtins.getAttr "foo" builtins` should also profit from. 
+ +* Avoid nested `VM::run` calls [hard] + + Currently when encountering Nix-native callables (thunks, closures) + the VM's run loop will nest and return the value of the nested call + frame one level up. This makes the Rust call stack almost mirror the + Nix call stack, which is usually undesirable. + + It is possible to detect situations where this is avoidable and + instead set up the VM in such a way that it continues and produces + the desired result in the same run loop, but this is kind of tricky + to get right - especially while other parts are still in flux. + + For details consult the commit with Gerrit change ID + `I96828ab6a628136e0bac1bf03555faa4e6b74ece`, in which the initial + attempt at doing this was reverted. + +* Avoid thunks if only identifier closing is required [medium] + + Some constructs, like `with`, mostly do not change runtime behaviour + if thunked. However, they are wrapped in thunks to ensure that + deferred identifiers are resolved correctly. + + This can be avoided, as we statically analyse the scope and should + be able to tell whether any such logic was required. + +* Intern literals [easy] + + Currently, the compiler emits a separate entry in the constant + table for each literal. So the program `1 + 1 + 1` will have + three entries in its `Chunk::constants` instead of only one. + +* Do some list and attribute set operations in place [hard] + + Algorithms that can not do a lot of work inside `builtins` like `map`, + `filter` or `foldl'` usually perform terribly if they use data structures like + lists and attribute sets. + + `builtins` can do work in place on a copy of a `Value`, but naïvely expressed + recursive algorithms will usually use `//` and `++` to do a single change to a + `Value` at a time, requiring a full copy of the data structure each time. + It would be a big improvement if we could do some of these operations in place + without requiring a new copy. + + There are probably two approaches: We could determine statically if a value is + reachable from elsewhere and emit a special in place instruction if not. An + easier alternative is probably to rely on reference counting at runtime: If no + other reference to a value exists, we can extend the list or update the + attribute set in place. + + An **alternative** to this is using [persistent data + structures](https://en.wikipedia.org/wiki/Persistent_data_structure) or at the + very least [immutable data structures](https://docs.rs/im/latest/im/) that can + be copied more efficiently than the stock structures we are using at the + moment. diff --git a/tvix/eval/docs/language-issues.md b/tvix/eval/docs/language-issues.md new file mode 100644 index 000000000000..26401665bbb5 --- /dev/null +++ b/tvix/eval/docs/language-issues.md @@ -0,0 +1,46 @@ +# Nix language issues + +In the absence of a language standard, what Nix (the language) is, is prescribed +by the behavior of the C++ Nix implementation. Still, there are reasons not to +accept some behavior: + +* Tvix aims for nixpkgs compatibility only. This means we can ignore behavior in + edge cases nixpkgs doesn't trigger as well as obscure features it doesn't use + (e.g. `__overrides`). +* Some behavior of the Nix evaluator seems to be unintentional or an + implementation detail leaking out into language behavior. + +Especially in the latter case, it makes sense to raise the respective issue and +maybe to get rid of the behavior in all implementations for good. 
Below is an +(incomplete) list of such issues: + +* [Behaviour of nested attribute sets depends on definition order][i7111] +* [Partially constructed attribute sets are observable during dynamic attr names construction][i7012] +* [Nix parsers merges multiple attribute set literals for the same key incorrectly depending on definition order](i7115) + +On the other hand, there is behavior that seems to violate one's expectation +about the language at first, but has good enough reasons from an implementor's +perspective to keep them: + +* Dynamic keys are forbidden in `let` and `inherit`. This makes sure that we + only need to do runtime identifier lookups for `with`. More dynamic (i.e. + runtime) lookups would make the scoping system even more complicated as well + as hurt performance. +* Dynamic attributes of `rec` sets are not added to its scope. This makes sense + for the same reason. +* Dynamic and nested attributes in attribute sets don't get merged. This is a + tricky one, but avoids doing runtime (recursive) merges of attribute sets. + Instead all necessary merging can be inferred statically, i.e. the C++ Nix + implementation already merges at parse time, making nested attribute keys + syntactic sugar effectively. + +Other behavior is just odd, surprising or underdocumented: + +* `builtins.foldl'` doesn't force the initial accumulator (but all other + intermediate accumulator values), differing from e.g. Haskell, see + the [relevant PR discussion](p7158). + +[i7111]: https://github.com/NixOS/nix/issues/7111 +[i7012]: https://github.com/NixOS/nix/issues/7012 +[i7115]: https://github.com/NixOS/nix/issues/7115 +[p7158]: https://github.com/NixOS/nix/pull/7158 diff --git a/tvix/eval/docs/opcodes-attrsets.md b/tvix/eval/docs/opcodes-attrsets.md new file mode 100644 index 000000000000..7026f3319dda --- /dev/null +++ b/tvix/eval/docs/opcodes-attrsets.md @@ -0,0 +1,122 @@ +# attrset-opcodes + +The problem with attrset literals is twofold: + +1. The keys of attribute sets may be dynamically evaluated. + + Access: + + ```nix + let + k = "foo"; + attrs = { /* etc. */ }; + in attrs."${k}" + ``` + + Literal: + ```nix + let + k = "foo"; + in { + "${k}" = 42; + } + ``` + + The problem with this is that the attribute set key is not known at + compile time, and needs to be dynamically evaluated by the VM as an + expression. + + For the most part this should be pretty simple, assuming a + theoretical instruction set: + + ``` + 0000 OP_CONSTANT(0) # key "foo" + 0001 OP_CONSTANT(1) # value 42 + 0002 OP_ATTR_SET(1) # construct attrset from 2 stack values + ``` + + The operation pushing the key needs to be replaced with one that + leaves a single value (the key) on the stack, i.e. the code for the + expression, e.g.: + + ``` + 0000..000n <operations leaving a string value on the stack> + 000n+1 OP_CONSTANT(1) # value 42 + 000n+2 OP_ATTR_SET(1) # construct attrset from 2 stack values + ``` + + This is fairly easy to do by simply recursing in the compiler when + the key expression is encountered. + +2. The keys of attribute sets may be nested. + + This is the non-trivial part of dealing with attribute set + literals. Specifically, the nesting can be arbitrarily deep and the + AST does not guarantee that related set keys are located + adjacently. + + Furthermore, this frequently occurs in practice in Nix. We need a + bytecode representation that makes it possible to construct nested + attribute sets at runtime. 
+ + Proposal: AttrPath values + + If we can leave a value representing an attribute path on the + stack, we can offload the construction of nested attribute sets to + the `OpAttrSet` operation. + + Under the hood, OpAttrSet in practice constructs a `Map<NixString, + Value>` attribute set in most cases. This means it expects to pop + the value of the key of the stack, but is otherwise free to do + whatever it wants with the underlying map. + + In a simple example, we could have code like this: + + ```nix + { + a.b = 15; + } + ``` + + This would be compiled to a new `OpAttrPath` instruction that + constructs and pushes an attribute path from a given number of + fragments (which are popped off the stack). + + For example, + + ``` + 0000 OP_CONSTANT(0) # key "a" + 0001 OP_CONSTANT(1) # key "b" + 0002 OP_ATTR_PATH(2) # construct attrpath from 2 fragments + 0003 OP_CONSTANT(2) # value 42 + 0004 OP_ATTRS(1) # construct attrset from one pair + ``` + + Right before `0004` the stack would be left like this: + + [ AttrPath[a,b], 42 ] + + Inside of the `OP_ATTRS` instruction we could then begin + construction of the map and insert the nested attribute sets as + required, as well as validate that there are no duplicate keys. + +3. Both of these cases can occur simultaneously, but this is not a + problem as the opcodes combine perfectly fine, e.g.: + + ```nix + let + k = "a"; + in { + "${k}".b = 42; + } + ``` + + results in + + ``` + 0000..000n <operations leaving a string value on the stack> + 000n+1 OP_CONSTANT(1) # key "b" + 000n+2 OP_ATTR_PATH(2) # construct attrpath from 2 fragments + 000n+3 OP_CONSTANT(2) # value 42 + 000n+4 OP_ATTR_SET(1) # construct attrset from 2 stack values + ``` diff --git a/tvix/eval/docs/recursive-attrs.md b/tvix/eval/docs/recursive-attrs.md new file mode 100644 index 000000000000..c30cfd33e6c7 --- /dev/null +++ b/tvix/eval/docs/recursive-attrs.md @@ -0,0 +1,68 @@ +Recursive attribute sets +======================== + +The construction behaviour of recursive attribute sets is very +specific, and a bit peculiar. + +In essence, there are multiple "phases" of scoping that take place +during attribute set construction: + +1. Every inherited value without an explicit source is inherited only + from the **outer** scope in which the attribute set is enclosed. + +2. A new scope is opened in which all recursive keys are evaluated. + This only considers **statically known keys**, attributes can + **not** recurse into dynamic keys in `self`! + + For example, this code is invalid in C++ Nix: + + ``` + nix-repl> rec { ${"a"+""} = 2; b = a * 10; } + error: undefined variable 'a' at (string):1:26 + ``` + +3. Finally, a third scope is opened in which dynamic keys are + evaluated. + +This behaviour, while possibly a bit strange and unexpected, actually +simplifies the implementation of recursive attribute sets in Tvix as +well. + +Essentially, a recursive attribute set like this: + +```nix +rec { + inherit a; + b = a * 10; + ${"c" + ""} = b * 2; +} +``` + +Can be compiled like the following expression: + +```nix +let + inherit a; +in let + b = a * 10; + in { + inherit a b; + ${"c" + ""} = b * 2; + } +``` + +Completely deferring the resolution of recursive identifiers to the +existing handling of recursive scopes (i.e. deferred access) in let +bindings. + +In practice, we can further specialise this and compile each scope +directly into the form expected by `OpAttrs` (that is, leaving +attribute names on the stack) before each value's position. 
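
To make "the form expected by `OpAttrs`" a bit more concrete, here is a small, self-contained sketch with a toy `Value` type and a hypothetical `op_attrs` helper (not the actual tvix-eval implementation): the compiled scopes leave name/value pairs on the value stack, and the final instruction pops those pairs and builds the set.

```rust
use std::collections::BTreeMap;

/// Toy value type, standing in for tvix-eval's `Value`.
#[derive(Clone, Debug, PartialEq)]
enum Value {
    String(String),
    Integer(i64),
}

/// Hypothetical attrset-construction instruction: pop `count` (name, value)
/// pairs off the value stack and build the attribute set from them.
fn op_attrs(stack: &mut Vec<Value>, count: usize) -> BTreeMap<String, Value> {
    let pairs = stack.split_off(stack.len() - 2 * count);
    let mut attrs = BTreeMap::new();

    for pair in pairs.chunks(2) {
        let name = match &pair[0] {
            Value::String(name) => name.clone(),
            other => panic!("attribute name must be a string, got {:?}", other),
        };
        attrs.insert(name, pair[1].clone());
    }

    attrs
}

fn main() {
    // Roughly what the stack looks like after evaluating the scopes above
    // for `rec { inherit a; b = a * 10; }`, with `a = 2` in the outer scope.
    let mut stack = vec![
        Value::String("a".into()),
        Value::Integer(2),
        Value::String("b".into()),
        Value::Integer(20),
    ];

    let attrs = op_attrs(&mut stack, 2);
    assert!(stack.is_empty());
    assert_eq!(attrs["b"], Value::Integer(20));
}
```

The dynamic `${"c" + ""}` key from the example is handled the same way, it just gets its name pushed by arbitrary compiled code instead of a constant.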
+ +C++ Nix's Implementation +------------------------ + +* [`ExprAttrs`](https://github.com/NixOS/nix/blob/2097c30b08af19a9b42705fbc07463bea60dfb5b/src/libexpr/nixexpr.hh#L241-L268) + (AST representation of attribute sets) +* [`ExprAttrs::eval`](https://github.com/NixOS/nix/blob/075bf6e5565aff9fba0ea02f3333c82adf4dccee/src/libexpr/eval.cc#L1333-L1414) +* [`addAttr`](https://github.com/NixOS/nix/blob/master/src/libexpr/parser.y#L98-L156) (`ExprAttrs` construction in the parser) diff --git a/tvix/eval/docs/vm-loop.md b/tvix/eval/docs/vm-loop.md new file mode 100644 index 000000000000..6266d34709cb --- /dev/null +++ b/tvix/eval/docs/vm-loop.md @@ -0,0 +1,315 @@ +tvix-eval VM loop +================= + +This document describes the new tvix-eval VM execution loop implemented in the +chain focusing around cl/8104. + +## Background + +The VM loop implemented in Tvix prior to cl/8104 had several functions: + +1. Advancing the instruction pointer for a chunk of Tvix bytecode and + executing instructions in a loop until a result was yielded. + +2. Tracking Nix call frames as functions/thunks were entered/exited. + +3. Catching trampoline requests returned from instructions to force suspended + thunks without increasing stack size *where possible*. + +4. Handling trampolines through an inner trampoline loop, switching between a + code execution mode and execution of subsequent trampolines. + +This implementation of the trampoline logic was added on to the existing VM, +which previously always recursed for thunk forcing. There are some cases (for +example values that need to be forced *inside* of the execution of a builtin) +where trampolines could not previously be used, and the VM recursed anyways. + +As a result of this trampoline logic being added "on top" of the existing VM +loop the code became quite difficult to understand. This led to several bugs, +for example: b/251, b/246, b/245, and b/238. + +These bugs were tricky to deal with, as we had to try and make the VM do +things that are somewhat difficult to fit into its model. We could of course +keep extending the trampoline logic to accommodate all sorts of concepts (such +as finalisers), but that seems like it does not solve the root problem. + +## New VM loop + +In cl/8104, a unified new solution is implemented with which the VM is capable +of evaluating everything without increasing the call stack size. + +This is done by introducing a new frame stack in the VM, on which execution +frames are enqueued that are either: + +1. A bytecode frame, consisting of Tvix bytecode that evaluates compiled Nix + code. +2. A generator frame, consisting of some VM logic implemented in pure Rust + code that can be *suspended* when it hits a point where the VM would + previously need to recurse. + +We do this by making use of the `async` *keyword* in Rust, but notably +*without* introducing asynchronous I/O or concurrency in tvix-eval (the +complexity of which is currently undesirable for us). + +Specifically, when writing a Rust function that uses the `async` keyword, such +as: + +```rust +async fn some_builtin(input: Value) -> Result<Value, ErrorKind> { + let mut out = NixList::new(); + + for element in input.to_list()? { + let result = do_something_that_requires_the_vm(element).await; + out.push(result); + } + + Ok(out) +} +``` + +The compiler actually generates a state-machine under-the-hood which allows +the execution of that function to be *suspended* whenever it hits an `await`. 
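
As a rough intuition for what that means, the hand-written (and entirely made up, toy-typed) state machine below does the same kind of thing as the builtin above: it records how far it has gotten, stops whenever it needs "the VM" to do something with an element, and is resumed from the outside with the answer.

```rust
/// What the suspended computation needs next: either an answer from "the VM"
/// for one element, or it is finished and carries its final result.
enum Step {
    NeedsVm { element: i64 },
    Done(Vec<i64>),
}

/// A hand-written stand-in for the compiler-generated state machine: progress
/// (remaining input, results so far) lives in the struct, so execution can
/// stop at every suspension point and be picked up again later.
struct SomeBuiltinState {
    remaining: std::vec::IntoIter<i64>,
    out: Vec<i64>,
}

impl SomeBuiltinState {
    fn new(input: Vec<i64>) -> Self {
        SomeBuiltinState {
            remaining: input.into_iter(),
            out: Vec::new(),
        }
    }

    /// Resume with the answer to the previous request (if any) and run until
    /// the next suspension point, or until the computation is complete.
    fn resume(&mut self, vm_answer: Option<i64>) -> Step {
        if let Some(answer) = vm_answer {
            self.out.push(answer);
        }
        match self.remaining.next() {
            Some(element) => Step::NeedsVm { element },
            None => Step::Done(std::mem::take(&mut self.out)),
        }
    }
}

fn main() {
    let mut state = SomeBuiltinState::new(vec![1, 2, 3]);
    let mut answer = None;

    loop {
        match state.resume(answer) {
            // Stand-in for "something that requires the VM": just double it.
            Step::NeedsVm { element } => answer = Some(element * 2),
            Step::Done(result) => {
                assert_eq!(result, vec![2, 4, 6]);
                break;
            }
        }
    }
}
```

Nobody wants to write (or maintain) such state machines by hand for every builtin, which is where the crate introduced next comes in.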
+ +We use the [`genawaiter`][] crate that gives us a data structure and simple +interface for getting instances of these state machines that can be stored in +a struct (in our case, a *generator frame*). + +The execution of the VM then becomes the execution of an *outer loop*, which +is responsible for selecting the next generator frame to execute, and two +*inner loops*, which drive the execution of a bytecode frame or generator +frame forward until it either yields a value or asks to be suspended in favour +of another frame. + +All "communication" between frames happens solely through values left on the +stack: Whenever a frame of either type runs to completion, it is expected to +leave a *single* value on the stack. It follows that the whole VM, upon +completion of the last (or initial, depending on your perspective) frame +yields its result as the return value. + +The core of the VM restructuring is cl/8104, unfortunately one of the largest +single commit changes we've had to make yet, as it touches pretty much all +areas of tvix-eval. The introduction of the generators and the +message/response system we built to request something from the VM, suspend a +generator, and wait for the return is in cl/8148. + +The next sections describe in detail how the three different loops work. + +### Outer VM loop + +The outer VM loop is responsible for selecting the next frame to run, and +dispatching it correctly to inner loops, as well as determining when to shut +down the VM and return the final result. + +``` + ╭──────────────────╮ + ╭────────┤ match frame kind ├──────╮ + │ ╰──────────────────╯ │ + │ │ + ┏━━━━━━━━━━━━┷━━━━━┓ ╭───────────┴───────────╮ +───►┃ frame_stack.pop()┃ ▼ ▼ + ┗━━━━━━━━━━━━━━━━━━┛ ┏━━━━━━━━━━━━━━━━┓ ┏━━━━━━━━━━━━━━━━━┓ + ▲ ┃ bytecode frame ┃ ┃ generator frame ┃ + │ ┗━━━━━━━━┯━━━━━━━┛ ┗━━━━━━━━┯━━━━━━━━┛ + │[yes, cont.] │ │ + │ ▼ ▼ + ┏━━━━━━━━┓ │ ╔════════════════╗ ╔═════════════════╗ +◄───┨ return ┃ │ ║ inner bytecode ║ ║ inner generator ║ + ┗━━━━━━━━┛ │ ║ loop ║ ║ loop ║ + ▲ │ ╚════════╤═══════╝ ╚════════╤════════╝ + │ ╭────┴─────╮ │ │ + │ │ has next │ ╰───────────┬───────────╯ + [no] ╰───┤ frame? │ │ + ╰────┬─────╯ ▼ + │ ┏━━━━━━━━━━━━━━━━━┓ + │ ┃ frame completed ┃ + ╰─────────────────────────┨ or suspended ┃ + ┗━━━━━━━━━━━━━━━━━┛ +``` + +Initially, the VM always pops a frame from the frame stack and then inspects +the type of frame it found. As a consequence the next frame to execute is +always the frame at the top of the stack, and setting up a VM initially for +code execution is done by leaving a bytecode frame with the code to execute on +the stack and passing control to the outer loop. + +Control is dispatched to either of the inner loops (depending on the type of +frame) and the cycle continues once they return. + +When an inner loop returns, it has either finished its execution (and left its +result value on the *value stack*), or its frame has requested to be +suspended. + +Frames request suspension by re-enqueueing *themselves* through VM helper +methods, and then leaving the frame they want to run *on top* of themselves in +the frame stack before yielding control back to the outer loop. + +The inner control loops inform the outer loops about whether the frame has +been *completed* or *suspended* by returning a boolean. + +### Inner bytecode loop + +The inner bytecode loop drives the execution of some Tvix bytecode by +continously looking at the next instruction to execute, and dispatching to the +instruction handler. 
+ +``` + ┏━━━━━━━━━━━━━┓ +◄──┨ return true ┃ + ┗━━━━━━━━━━━━━┛ + ▲ + ╔════╧═════╗ + ║ OpReturn ║ + ╚══════════╝ + ▲ + ╰──┬────────────────────────────╮ + │ ▼ + │ ╔═════════════════════╗ + ┏━━━━━━━━┷━━━━━┓ ║ execute instruction ║ +───►┃ inspect next ┃ ╚══════════╤══════════╝ + ┃ instruction ┃ │ + ┗━━━━━━━━━━━━━━┛ │ + ▲ ╭─────┴─────╮ + ╰──────────────────────┤ suspends? │ + [no] ╰─────┬─────╯ + │ + │ + ┏━━━━━━━━━━━━━━┓ │ +◄──┨ return false ┃───────────────────────╯ + ┗━━━━━━━━━━━━━━┛ [yes] +``` + +With this refactoring, the compiler now emits a special `OpReturn` instruction +at the end of bytecode chunks. This is a signal to the runtime that the chunk +has completed and that its current value should be returned, without having to +perform instruction pointer arithmetic. + +When `OpReturn` is encountered, the inner bytecode loop returns control to the +outer loop and informs it (by returning `true`) that the bytecode frame has +completed. + +Any other instruction may also request a suspension of the bytecode frame (for +example, instructions that need to force a value). In this case the inner loop +is responsible for setting up the frame stack correctly, and returning `false` +to inform the outer loop of the suspension + +### Inner generator loop + +The inner generator loop is responsible for driving the execution of a +generator frame by continously calling [`Gen::resume`][] until it requests a +suspension (as a result of which control is returned to the outer loop), or +until the generator is done and yields a value. + +``` + ┏━━━━━━━━━━━━━┓ +◄──┨ return true ┃ ◄───────────────────╮ + ┗━━━━━━━━━━━━━┛ │ + │ + [Done] │ + ╭──────────────────┴─────────╮ + │ inspect generator response │◄────────────╮ + ╰──────────────────┬─────────╯ │ + [yielded] │ ┏━━━━━━━━┷━━━━━━━━┓ + │ ┃ gen.resume(msg) ┃◄── + ▼ ┗━━━━━━━━━━━━━━━━━┛ + ╭────────────╮ ▲ + │ same-frame │ │ + │ request? ├────────────────╯ + ╰─────┬──────╯ [yes] + ┏━━━━━━━━━━━━━━┓ │ +◄──┨ return false ┃ ◄──────────────────╯ + ┗━━━━━━━━━━━━━━┛ [no] +``` + +On each execution of a generator frame, `resume_with` is called with a +[`VMResponse`][] (i.e. a message *from* the VM *to* the generator). For a newly +created generator, the initial message is just `Empty`. + +A generator may then respond by signaling that it has finished execution +(`Done`), in which case the inner generator loop returns control to the outer +loop and informs it that this generator is done (by returning `true`). + +A generator may also respond by signaling that it needs some data from the VM. +This is implemented through a request-response pattern, in which the generator +returns a `Yielded` message containing a [`VMRequest`][]. These requests can be +very simple ("Tell me the current store path") or more complex ("Call this Nix +function with these values"). + +Requests are divided into two classes: Same-frame requests (requests that can be +responded to *without* returning control to the outer loop, i.e. without +executing a *different* frame), and multi-frame generator requests. Based on the +type of request, the inner generator loop will either handle it right away and +send the response in a new `resume_with` call, or return `false` to the outer +generator loop after setting up the frame stack. + +Most of this logic is implemented in cl/8148. 
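
To make the request/response cycle concrete, here is a minimal, self-contained sketch of driving such a generator with `resume_with`. The `Request`/`Response` enums are made-up toy stand-ins for [`VMRequest`][]/[`VMResponse`][], and the only request shown is a same-frame one that the loop can answer directly; this is the shape of the mechanism, not the actual tvix-eval code.

```rust
// Assumes the genawaiter crate (the same one tvix-eval uses) as a dependency.
use genawaiter::{rc::Gen, GeneratorState};

/// Toy stand-in for `VMRequest`: something the generator needs from the VM.
enum Request {
    Double(i64),
}

/// Toy stand-in for `VMResponse`: what the VM sends back into the generator.
enum Response {
    /// Initial message for a freshly created generator.
    Empty,
    /// Answer to a previously yielded request.
    Value(i64),
}

fn main() {
    // A "generator frame": pure Rust logic that suspends whenever it needs
    // something from the VM by yielding a Request and awaiting the Response.
    let mut frame = Gen::new(|co| async move {
        let mut sum = 0;
        for i in 1..=3 {
            match co.yield_(Request::Double(i)).await {
                Response::Value(v) => sum += v,
                Response::Empty => unreachable!("requests are always answered"),
            }
        }
        sum // the frame's final value
    });

    // The inner generator loop: keep resuming with responses until the
    // generator reports completion.
    let mut message = Response::Empty;
    let result = loop {
        match frame.resume_with(message) {
            // A same-frame request: answer it here and loop around.
            GeneratorState::Yielded(Request::Double(n)) => {
                message = Response::Value(n * 2);
            }
            // The generator is done; its return value is the frame's result.
            GeneratorState::Complete(value) => break value,
        }
    };

    assert_eq!(result, 12); // 2*1 + 2*2 + 2*3
}
```

In the real VM, the `Yielded` arm additionally distinguishes multi-frame requests, for which the loop instead sets up the frame stack and returns `false` to the outer loop, as described above.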
+ +[`Gen::resume`]: https://docs.rs/genawaiter/0.99.1/genawaiter/rc/struct.Gen.html#method.resume_with +[`VMRequest`]: https://cs.tvl.fyi/depot@2696839770c1ccb62929ff2575a633c07f5c9593/-/blob/tvix/eval/src/vm/generators.rs?L44 +[`VMResponse`]: https://cs.tvl.fyi/depot@2696839770c1ccb62929ff2575a633c07f5c9593/-/blob/tvix/eval/src/vm/generators.rs?L169 + +## Advantages & Disadvantages of the approach + +This approach has several advantages: + +* The execution model is much simpler than before, making it fairly + straightforward to build up a mental model of what the VM does. + +* All "out of band requests" inside the VM are handled through the same + abstraction (generators). + +* Implementation is not difficult, albeit a little verbose in some cases (we + can argue about whether or not to introduce macros for simplifying it). + +* Several parts of the VM execution are now much easier to document, + potentially letting us onboard tvix-eval contributors faster. + +* The linear VM execution itself is much easier to trace now, with for example + the `RuntimeObserver` (and by extension `tvixbolt`) giving much clearer + output now. + +But it also comes with some disadvantages: + +* Even though we "only" use the `async` keyword without a full async-I/O + runtime, we still encounter many of the drawbacks of the fragmented Rust + async ecosystem. + + The biggest issue with this is that parts of the standard library become + unavailable to us, for example the built-in `Vec::sort_by` can no longer be + used for sorting in Nix because our comparators themselves are `async`. + + This led us to having to implement some logic on our own, as the design of + `async` in Rust even makes it difficult to provide usecase-generic + implementations of concepts like sorting. + +* We need to allocate quite a few new structures on the heap in order to drive + generators, as generators involve storing `Future` types (with unknown + sizes) inside of structs. + + In initial testing this seems to make no significant difference in + performance (our performance in an actual nixpkgs-eval is still bottlenecked + by I/O concerns and reference scanning), but is something to keep in mind + later on when we start optimising more after the low-hanging fruits have + been reaped. + +## Alternatives considered + +1. Tacking on more functionality onto the existing VM loop + implementation to accomodate problems as they show up. This is not + preferred as the code is already getting messy. + +2. Making tvix-eval a fully `async` project, pulling in something like Tokio + or `async-std` as a runtime. This is not preferred due to the massively + increased complexity of those solutions, and all the known issues of fully + buying in to the async ecosystem. + + tvix-eval fundamentally should work for use-cases besides building Nix + packages (e.g. for `//tvix/serde`), and its profile should be as slim as + possible. + +3. Convincing the Rust developers that Rust needs a way to guarantee + constant-stack-depth tail calls through something like a `tailcall` + keyword. + +4. ... ? + +[`genawaiter`]: https://docs.rs/genawaiter/ diff --git a/tvix/eval/proptest-regressions/value/mod.txt b/tvix/eval/proptest-regressions/value/mod.txt new file mode 100644 index 000000000000..05b01b4c768b --- /dev/null +++ b/tvix/eval/proptest-regressions/value/mod.txt @@ -0,0 +1,10 @@ +# Seeds for failure cases proptest has generated in the past. It is +# automatically read and these particular cases re-run before any +# novel cases are generated. 
+# +# It is recommended to check this file in to source control so that +# everyone who runs the test benefits from these saved cases. +cc 241ec68db9f684f4280d4c7907f7105e7b746df433fbb5cbd6bf45323a7f3be0 # shrinks to input = _ReflexiveArgs { x: List(NixList([List(NixList([Path("𑁯")]))])) } +cc b6ab5fb25f5280f39d2372e951544d8cc9e3fcd5da83351266a0a01161e12dd7 # shrinks to input = _ReflexiveArgs { x: Attrs(NixAttrs(KV { name: Path("𐎸-{\u{a81}lq9Z"), value: Bool(false) })) } +cc 3656053e7a8dbe1c01dd68a8e06840fb6e693dde942717a7c18173876d9c2cce # shrinks to input = _SymmetricArgs { x: List(NixList([Path("\u{1daa2}:.H🢞ୡ\\🕴7iu𝋮T𝓕\\%i:"), Integer(-7435423970896550032), Float(1.2123587650724335e-5), Integer(5314432620816586712), Integer(-8316092768376026052), Integer(-7632521684027819842), String(NixString(Smol("ᰏ᥀\\\\ȷf8=\u{2003}\"𑁢רּA/%�bQffl<౯Ⱥ\u{1b3a}`T{"))), Bool(true), String(NixString(Smol("Ⱥힳ\"<\\`tZ/�൘🢦Ⱥ=x𑇬"))), Null, Bool(false), String(NixString(Smol(";%'࿎.ೊ🉡𑌊/🕴3Ja"))), Null, String(NixString(Smol("vNᛦ=\\`𝐓P\\"))), Bool(true), Null, String(NixString(Smol("\"zાë\u{11cb6}6%yꟽ𚿾🡖`!"))), Integer(1513983844724992869), Bool(true), Float(-8.036903674864022e114), Path("G࿐�/᠆₫%𝈇P"), Bool(false), Bool(true), Null, Attrs(NixAttrs(Empty)), Float(-6.394835856260315e-46), Null, Path("G?🭙O<🟰𐮭𑤳�*ܥ𞹉`?$/j1=p𑙕h\u{e0147}\u{1cf3b}"), Bool(false), Bool(false), Path("$"), Float(7.76801238087078e-309), Integer(-4304837936532390878), Attrs(NixAttrs(Im({NixString(Smol("")): Float(-1.5117468628684961e-307), NixString(Smol("#=B\"o~Ѩ\"Ѩ𐠅T")): String(NixString(Smol("Kঅ&NVꩋࠔ'𝄏<"))), NixString(Heap("&¥sଲ\"\\=𑖺Q𚿾VTຐ[ﬓ%")): Integer(-9079075359788064855), NixString(Smol("'``{:5𑚞=l𑣿")): Bool(true), NixString(Smol("*𐖚")): Bool(false), NixString(Heap(":*𐖛𑵨%C'Ѩ")): Null, NixString(Smol("?ծ/෴")): Bool(true), NixString(Smol("W𑌏")): Float(-1.606122565666547e-309), NixString(Smol("`𐺭¥\u{9d7}𖿡1Y𖤛>")): Float(0.0), NixString(Heap("{&]\\𞅂𞅅&7ସ")): Path("\"*o$ଇ🛢𞸤�🉐�🃏?##bﷅ|a¥࿔ᓊ\u{bd7}𑆍W?P𑊌𑩰"), NixString(Smol("Ⱥ\u{10a3a}a/🕴'\u{b3e}𐦨Sᅩ$1kw\u{cd5}B)\u{e01b7}1:R@")): Path("-*꣒=#\\🄣ﭘ𑴃\\ᤖ%3\u{1e00e}{YബL\'GE<|aȺ:\u{1daa8}𐮯{ಯ𝑠\\"), NixString(Smol("מ'%:")): Integer(-5866590459020557377), NixString(Heap("୪3\u{1a79}-l\u{bd7}Ξ<ন<")): Float(1.4001769654461214e-61), NixString(Heap("ங𞹔𑊍")): Path("*%q<%=LU.TჍw𭴟[Ѩ𑌭𞁈?Ì?X%מּ¥𞲥𞹒ౚ"), NixString(Heap("டѨמּ&'𐌺𫈔.=\u{5af}\u{10a39}₼\\G")): String(NixString(Smol("ₙ.\u{10f83}<x/Ⱥ𝼐$=𑵧{jE𞹺/f*𐢯ഒ𖿣﹂:ᨧ𐞴¥`%"))), NixString(Smol("𐠈𐠁`\u{b57}ꛪ`@$y")): Float(1.2295541964458574e-308), NixString(Smol("𐮫`:𐠂$4%\u{ac5}𐕯🕴")): String(NixString(Heap("='/𑍐<🕴\"=I᭻9𐮙\u{1e01b}"))), NixString(Heap("𑌏({?kG:ﺡ𑌵0𝒚q\"ঐ")): String(NixString(Heap(":ਕ"))), NixString(Heap("𑌐𞸡?𑙓,?🩒7\":'..'<𐀏𑓒Ეd𑊈ῠ𖽙3'&🀪𞅏")): Path("."), NixString(Smol("𑒬𑵡?KᲾWȺ`ᅯ0{<ኘzEÃ\"û𝤕$🞀𝋯�.\"𐕾\u{1daad}㈃\\𐤿ʊs")): Integer(2276487328720525493), NixString(Heap("𝔾\u{11c9f}<Δ]T𐞡𐞵𞹉�")): Bool(false), NixString(Heap("🕴Ⱥ\u{f82}/%*𛅐᠐Ѩ$🢕 *�)𝓁𑃀𝒦𐝂ö𑤕Ѩh")): Path("🕴")}))), String(NixString(Smol("&m𝈶%&+\u{ecd}:&¥\u{11d3d}°%'.𞹙2\u{10eac}-ඈ\u{11369}𞹰¥𒑴'xѨ𞹝.𑼖V"))), Path(""), Path("𑫒%J\u{11ca3}"), Integer(5535573838430566518), String(NixString(Heap("⿵𛅐🀰Z$,\\/v\\⏇\u{a02}ள𝕁?\u{11ef4}%&/�|\"<Ⱥ&cUÛἜᥣ𐠼𘴄"))), Float(-1.0028921870468647e243), Path(".j*𑣱ÜM𑈅I?MvZy:𐄂�𞹋%?Ⴧ%"), Null, String(NixString(Smol("ퟘ🕴𝒴𒐋$\u{afd}ਃÜѨ`\u{11d44}E\\;\"?𬛕$"))), String(NixString(Heap("/)!.P🇪ਾ'⮮𐰣א=tៜ⮏m:\u{1773}\"Ⴧsቪ+HNk"))), Float(-3.7874900882792316e-77), String(NixString(Heap("Ⱥ<𐺰L:🭝𐡆𞅀Ѩ𑬄a.m𐀼V\"𝋊A𝄀\u{1e131}﹝"))), Path("aᨩ�?"), Float(6.493959567338054e87), Null, Null, Float(1.13707995848205e-115), Integer(-4231403127163468251), 
Float(-0.0), Float(-1.1096242386070431e-45), Integer(-5080222544395825040), Integer(2218353666908906569), Bool(false), Bool(true), Null, Float(-334324631469448.56), String(NixString(Heap("j%ѨáѨਭ\"᠖𐔅𛲂"))), Null, Float(5.823165830825334e-224), Path("&𞹋𖭖:$\\𑂻&ኊ(𞹋LH{ꟓ@=\\nલ&lyໃd"), String(NixString(Smol(""))), Bool(false), Float(1.0892031562808535e81), Null, Integer(-3110788464663743166), Bool(false), Null, Null])), y: List(NixList([Integer(7749573686807634185), Float(0.0), Attrs(NixAttrs(Im({NixString(Heap("")): Null, NixString(Smol("*<Y")): Bool(true), NixString(Heap("*\u{eb5}*:꒶Ѩ&m🕴🛫:_\u{ecb}1$pk!\u{1183a}b*:")): String(NixString(Heap("🕴𞻰𛃁𞹛𱬒Ⱥ=*]\u{1bad}t\u{11d40}ං𞓤꧑\"Ⱥ\"\u{9d7}𬇨$/Û\"zz*ఏ"))), NixString(Smol("?�𐄀&TbY&<'ᾍ?ⶱ工=%¥Ѩ:<Ѩ")): Null, NixString(Smol("Y𐣵🢅$è𝕊f&5ꬢN🕴🉁z�𐺰Mꕁ")): Integer(7378053691404749008), NixString(Heap("]\u{1712}&🕴{𛲗భ")): Path("`ꬪᝋȺѨ𞹴:Ⱥꕎwp𖩂Ⴧ𑜶ল2/?¥\"DL¥?�\'𞹇𛅒p=Ⱥ"), NixString(Heap("`𝋤\"m\\.𐠼🕴𖺊?")): Null, NixString(Heap("\u{a71}ࡷ6lꙧ{�ன`&�GןQ$(")): String(NixString(Heap("f\"<\"X!"))), NixString(Heap("᪠cꪙ\\ਲࠐ🕴?Ⱥ\u{11c97}<🕴«Ù\u{10eff}𐒣:<íN%y\\𐮙ꩅR\"=e𐺭")): Float(0.0), NixString(Smol("𐮙MჍ:%ஊ\u{dd6}$Ù?,)Z/𑌉?Qo?=\u{b01}cȺ,\\*")): Integer(-750568219128686012), NixString(Smol("𖫵𑶨ಭ\u{10a06}\\C_=🕴\u{10eac}೨喝W]\u{fb3}Ⱥ8")): Path("vLeড়ys..Ѩ�𑈓!ଳ&y<ໂ§{EUⴭ꩗U*\u{1e132}"), NixString(Smol("𞢀TAC\\*2🕴>%Ѩ𑩮P?G\u{a0}f𮗻*v¥Uq\u{b62}")): Float(3.882048884582667e95), NixString(Smol("𞸱𐏍2$ml.?.*U𫊴꭪<~gへ𑾰")): Null, NixString(Smol("🕴%`᭄N{3?k𑓗%:/D𑤘ᅴP^9=Z៦")): Null, NixString(Smol("𫘦*ຢ'7﹠KѨj𝔊q\u{bc0}bલ𐤿%🕴�𞠳�L&¥")): Null}))), Path("A\u{a3c}l᪅q.ȺA&"), Integer(4907301083808036161), Bool(false), Null, Bool(true), Attrs(NixAttrs(Empty)), Bool(false), String(NixString(Smol("᧹H$T൧𞅏=𞹟🕴{[y"))), Path("\u{1da43}$�uῚÈ¥�¥\'\u{b82}ල\u{11d3a}Ⱥ:🆨`𝍨1%`=ꝵ\u{11d41}\\X:*ྈኋ𞸯"), Integer(-8507829082635299848), Integer(-3606086848261110558), Float(2.2176784412249313e-278), Bool(true), Float(-1.1853240079167073e253), Path("=𐠕ߕ*🕴ȺȺ:Ⱥ%L💇Ô\\`Ὑ%🢖ዀ ൿ\\🕴Iቫ=~;\"ȺΌ"), Null, Attrs(NixAttrs(Im({NixString(Heap("")): Float(-7.81298889375788e-309), NixString(Smol("%>=ꨜᇲກ𖭜\u{1a60}Q(/X㈌\"*{ㄟ`¨=&'")): Bool(true), NixString(Heap("%n𛄲Y'𞹧𞺧")): Bool(true), NixString(Smol("&/.ȺV�︒\u{afb}'𞹔~\\Oᬽ�")): Float(8.59582436625354e-309), NixString(Heap("&y")): Path(".<\'ꢿ.W&¥�"), NixString(Smol("'1\u{11d91}¥O-𑨩ȺrὝ¥:Wෳቍq𑼌@^𑊜?")): Bool(true), NixString(Smol("*E𑖬ꦊ¹𛅤ೡ/'𝐔j𐖏𞸹7)ㅚ")): Integer(3181119108337466410), NixString(Smol("*r¥[.ª\u{10a3a}\u{1e132}EB⵰𞹗")): String(NixString(Heap("{🟰𖫧🢚/𐍰'𞸁ൻ🃪�𛲃?s&2𑴉"))), NixString(Smol("*\u{1e08f}Sü🕴N'Eƪ")): Bool(false), NixString(Smol("/=`Ⱥ")): Float(-1.8155608059955555e-303), NixString(Heap("/੮RE=L,/*\"'𑊿=�+�🡨ᢒ𖮁ਹ𑱟Ѩk᧧\"\"R")): Integer(-1311391009295683341), NixString(Heap("?%$'<<-23᪕^ቝȺj\\𐴆")): Null, NixString(Heap("?&?⑁ವ\u{bd7}𑓘\u{112e9}ᝑl9p`")): Integer(-5524232499716374878), NixString(Smol("?״{j\"8ࢿ𞹔")): Integer(1965352054196388057), NixString(Smol("@𬖗{0.:?")): Null, NixString(Heap("C\"$𖮂\"/𐬗IyU_𑴈/N=\u{ac7}𐬉:ꬁ<ꟓ<.えG𖦐I/ᠤ")): Float(-2.6565358047869407e-299), NixString(Heap("G`🟡y𖾝𐣴`#+'<")): Float(-1.643412216185574e-73), NixString(Smol("O𞹤៷.?𒑲")): Integer(8639041881884941660), NixString(Smol("R<X𐚒¥=ጚ,.ᤦ/?{\u{b57}꯴ೝ/¥m🕴პ🟰f𒑱X_.")): Null, NixString(Heap("R珞એ�$f")): Null, NixString(Heap("ZbȺὖ")): Integer(300189569873494072), NixString(Smol("\\9r𝒢𐩈¥Ð𐼚\\?Ѩ{$")): Integer(-5531416284385072043), NixString(Smol("`j$�ⓧ\u{16b34}'ⷆt\\𞠏|🢒'%&𑂕𐖼/$\u{ac7}")): String(NixString(Heap("1=¡<𐞵១🢜Ð℧p\\4𐨗𐤿=ශ`.[<\u{dd6}."))), NixString(Smol("`ᥳ")): Bool(true), NixString(Heap("`𐨕``ቓ𑜿$*Dᤱ`:/}🕴N'𘅺ൎ7")): Path("/"), 
NixString(Heap("fෳ\\ßϐ*𞸭'%𑤁$jા=:Ѩt{\"0ߢ/ಐ𐁆𞹛i𗹱'🕴")): Bool(false), NixString(Smol("n6&<𞟤'JB¦x🩤vቚ\u{1e008}Ѩ¥𐖛j🕴b¥𐼙'\u{c00}a")): Path("\u{110c2}p:Ѩ\u{1a58}ⶰO<?𞸡𐖙"), NixString(Smol("ntU爵�^🪫'%&ਰ/")): Bool(false), NixString(Smol("o")): Path("¥�\'/ìRV𐄚"), NixString(Smol("t\u{2df0}b.𐠈¹𰃦*𖭞:🮻𛅤7𞢼𑊩XL\\ਐ.N")): Null, NixString(Heap("yg'«🡓")): Path("/`]𞻰`\u{1cf0c}Ë\u{c55}<bȺ�!"), NixString(Heap("y🕴𑤑/ල$🢱\\~\u{aaec}`ຄ")): String(NixString(Smol(":b🭑𑅟ౘත'�:hiⷊ*{*/ꙟ"))), NixString(Smol("{?^n𑴉🩻៵o<ಮz-뗨")): Bool(false), NixString(Smol("ð୭ண?𑅢\\<<%?<=$[<d𑋶\\w𐖔u<")): Attrs(NixAttrs(KV { name: Path("ଛ🕴R`𱡍=\u{2028}?¤𐔘ῐw41A𑃰𑥙<&:/"), value: Integer(-4007996343681736077) })), NixString(Smol("ù5 \u{c4a}ᝮ")): List(NixList([Integer(3829726756090354238), Bool(false), Integer(-7605748774039015772), Integer(-2904585304126015516), Float(1.668751782763388e125), Path("ጓ𑍐״𑖁$~෮¥"), Float(0.00010226480079820994), String(NixString(Heap("y¥C\u{c62}3"))), Integer(4954265069162436553), String(NixString(Heap("f𝔗/^%}₧Ѩ᪈\u{aa43}$𐆔𘴄𑤤N\u{c47}𑃹𑧄⿹𞹻"))), Bool(false), Float(7.883608538511117e36), Path("*ષඉ"), Integer(8893667840985960833), Null, String(NixString(Heap("Ѩ|f2\u{11300}Ⱥ\u{11374}🕴࿘\\e$ᢊR𑌐๔Ⴧ$�'`\u{1e028}🕴"))), Path("<Uc?𞹑\"🕴ᥳ:/ꬍ=ꮫ\\:ךּ&&jq\u{11d41}<_�%(῝Ⱥ�"), Float(-3.7947462255873233e189), Integer(1963485019377075037), Null, Integer(2642884952152033378), String(NixString(Heap("=\\Ѩqদ)%@�NH𑼄ⴭ.ዀ*Ⱥ$&\u{d01}`ভI𑩧h\u{1da9e}v𑀰/wl"))), Float(-3.1057935562909707e-153), Path("ÊȺ𐬗$?\'$ዀ%J`_𞹤"), String(NixString(Smol(":`'ຣ𐅔':𑴯\"R&r2h5\\�\\ਲ�<\u{11c3c}¥{Ⴭ!\":𝕆<*"))), Path("`%�Ⱥ࠽¥3Ⱥ?r&�🕴n<𐭰ȺȺᛞ$Ⱥ\u{a41}$ﭱප%\u{1b6b}.𖿰🛱?d"), String(NixString(Smol("ම𑌰d𞹺B𞹩&𑣄$ꬃO(ಝ{�/¥"))), Null, Path(""), Null, Integer(-5596806430051718833), String(NixString(Heap("Ѩ.<\\?𑴠¥<ப=~湮𑤉`v\\Hf\u{ac5}Lᾑ&.𞥟🕴5A\\'¥"))), Integer(-4563843222966965028), Integer(-1260016740228553697), Path("𑼊W𐄡ຄ<u\u{11357}e"), Float(-1.4738886746660203e-287), Float(-2.1710863308744702e271), Integer(-4463138123798208283), Null, Integer(7334938770111854006), String(NixString(Smol("&<\\𖺎CKཛ="))), Null, Float(3.6654773986616826e238), Path("¥\"=ᝯ𐢭$ஏ{.𐢫8ujx"), Bool(true), Path("𐩼ᨅ�\\ம}oѨ𐖌Y$�/z𑇧/`%¼𖭘𑃦:ᥲ$-"), Integer(4610198342416185998), Integer(-8760902751118060791), Path("Hí{𖬩~\u{733}{𝒹\':𞅀ݼ:𑣘Aஜp𑦧𞁦K=Z*"), String(NixString(Smol("\\\"\\O=𝆩𐝋0🕴\">.🟇/𝔇`¥ⷒ"))), String(NixString(Smol(".𝼩𑵢"))), Path("&cἾV&🈫WȺ2{:Uஔi𢢨�$\\Ѩⷉ<+𞥑︾�🢅𝄦"), Bool(false), String(NixString(Smol("?H\"ᝨᛧD🕴e"))), Bool(false), Null, Path("~"), String(NixString(Heap("*<𐄘బu;.𝁮🛵𞸹g\\mF%[LgG.𐭸𐫃*倱🕴`"))), String(NixString(Smol("`ó¥!0ѨW.ଠಏퟞ\\ਫ਼?🫳"))), Integer(5647514456840216227), Null, Bool(true), Bool(false), Integer(-7154144835313791397), Path("\\=🕴�ᣲ*𞹷𛲕cꮈ🫣CȺÏ𑤉נּ/$ਜ.\u{dd6}*%আ`𐄿y꡶"), String(NixString(Smol("`2¥/�ꫤX\"Lᱽ"))), Float(-1.5486826515105273e-100), Bool(false), Path("\\A6𝼥^]<🢖"), Null, Path("G`6𱡎%\u{1e08f}ᳰ"), Float(0.0), Float(-5.1289125333715925e299), Integer(-2181421333849729760), Bool(false), Null, Float(1.8473914799193903e206), Float(-0.0), Integer(-1376655844349042067), Integer(-5430097094598507290)])), NixString(Heap("Ϳ¥%h:?=$🟙p\u{1cf24}*𑴠Ⱥ]Xb")): Path("]l\'*𑇡\u{1e08f}*𝄍&,÷nc෴G¥,🕴𑌏+`?"), NixString(Smol("\u{85b}/")): String(NixString(Smol("%Ⱥ{𑤉pO𑱀$d/ñPF\"="))), NixString(Smol("\u{a51}H𞹾$`𐒡:�¥𐝡{𐺙౾�i${ಇTG��¥{`Òބ^")): Path("<𛁤Eh𑱅"), NixString(Heap("ક\"1ਐȺ")): String(NixString(Smol("𞹉ÕI$𑁔﹫xpnὝ{`RgX.&]ଘ"))), NixString(Smol("ங+ח𐼌ஏ:=R0D\u{afe}ð<%𖭴?/CT%Ⱥo=?𞥔𐴷\"")): Null, NixString(Heap("\u{e4d}/Ð\u{11d3c}m6౨࿒ਫ਼K¥u\\𝐪ୋ")): String(NixString(Smol("𐋴+Z\\𞸻kמּﲿ𐤨tn>ΐ/>3Ѩ<E{𐧆!"))), NixString(Heap("ጒ.ퟴ%\u{1e016}🕴𑨍ἢ=~\\:7𐦜")): Integer(7492187363855822507), 
NixString(Smol("ⱅ/𞹯=\\ꬆ^ᰢ.F𒿤𒾖ȺlȺÐ")): String(NixString(Smol("𞹗R"))), NixString(Heap("ⶤB2.$\u{10a05}𖫦&*\";y$¸𛲒𑊲U🕴Û\\🕴𞹂E಄,୨")): Float(4.8766603500240926e-73), NixString(Smol("ꬮ\\'\"A\\\\\"R.")): Bool(false), NixString(Heap("漢$%`Ⱥ")): Float(-6.1502027459326004e57), NixString(Heap("ᅵ𛲅</\"\\:�=h𑵥V=\u{c4a}")): Path(""), NixString(Heap("�I𐔋\u{1bc9d}\u{1e029}𛅹𑻨𐊾I¥?ѨѨ:�È\\'𞟾'Ѩ")): Float(4.528490506607037e180), NixString(Heap("𐮪qՎ4𝒦?F𐙍?")): String(NixString(Heap("`🂻𐺭` 𑊚ൽ/ⶻ🛶fȺ(f𐖻Έ{᪐𑌫Z%𑍍ꦘ𐴐&zdৡ𑼩"))), NixString(Heap("𒓸ං#.Ѩ㈞�i")): Path("$/"), NixString(Smol("𖤔𐖔%𖭖\u{1bc9e}")): Float(-2.70849802708656e-257), NixString(Heap("𖾖:y'𐮫dmvਫ਼`*QR𐏐::P=\\B🕴\"c𒓕eᎾΑ")): Path("\\ÔA"), NixString(Heap("𞄐'𖭕:𖽭𐖘\"{Ⱥ.\"⺈??ৎ🁡ଊ𦩽🕴𞣏w.:ࡰ")): Null, NixString(Heap("𞺖Ⱥೇ𥢁⹂Ѩ@ઋ𞹏<Ⱥ6ⴏ𑽗ஶ=L𑍐M.<ꭚ*J\"@~𝁌$\\]")): Float(-2.5300443460528325e91), NixString(Smol("🕴7𪻎𑃦𑤏+ῴᎣ\\ౝ?ட\"\\ꜭ")): Integer(8622149561196801422)}))), Path("I<:🫢:.𑋛ோ\'ⶣ[𑆔%)எM!1<ூ-J>/`$🠁<\\u*ল"), String(NixString(Smol("M|s?ଏ\\"))), Float(-0.0), Integer(6467180586052157790), Bool(false), Bool(true), Float(8.564068787661153e-156), Float(6.773183212257874e294), Integer(4333417029772452811), List(NixList([String(NixString(Smol("/%\u{9d7}𖮎\u{b43}𑰅𝋓ᝠi`\u{a02}aѨ𐢒>ⴭȺ𑌃C፧Â𐭰>G\"ፙ𐍁𐠈&$"))), Integer(2000343086436224127), Integer(3499236969186180442), Integer(4699855887288445431), String(NixString(Heap("ங𐮛𖿣Y𘡌`.𒑳𞋤R7$@`")))])), Bool(false), String(NixString(Heap("*🕴,/𐀬tyk𒑰\u{f90}"))), Integer(5929691397747217334), String(NixString(Smol(".=𝒢.Eⶇ⁃੮\u{fe04}𛅕C៰🢝Ὑ`{.g¥¥"))), Path("\\*J(\'%\u{1a68}k\':ⷋ?/%&"), Bool(false), Float(3.7904416693932316e-70), String(NixString(Heap("/Ⱥc$𐠼<�⾹ഉ "))), Integer(3823980300672166035), Null, Null, Bool(true), String(NixString(Heap("$�𐖔aᅨවw-=$🕴$𞹟xѨb🫂,mຄ"))), Float(-5.5969604383718855e-279), Path("Ἓቇ !\'𐍈𑙥&ಐz"), Bool(true), Integer(429169896063360948), Float(8.239424415661606e-193), Path(""), Attrs(NixAttrs(KV { name: Null, value: Float(-3.5244218644363005e64) })), Float(2.1261149106688998e-250), Float(2322171.9185311636), Integer(5934552133431813912), Integer(5774025761810842546), Float(7.97420158066399e225), Integer(4350620466621982631), Attrs(NixAttrs(Empty)), Integer(-6698369106426730093), Bool(false), Null, Null, Float(-5.41368837946135e190), Null, Path("\u{1112b}`¥𐀇=h𛅕`/?qG%GȺ\u{cd5}𝔼.𞊠\'\'."), Null, Bool(true), Float(-1.0226054851755721e-231), String(NixString(Heap("\"$ල%𐴴*s\"D:ᘯᜩ9𑌗"))), Integer(-713882901472215672), Path("/{𝇘𑒥*ﬧH`ਸ਼í$𞲏ῄZ`🫳𓊯vg]YசȺS𞹢𑼱ó3")])) } +cc b0bf56ae751ef47cd6a2fc751b278f1246d61497fbf3f7235fe586d830df8ebd # shrinks to input = _TransitiveArgs { x: Attrs(NixAttrs(Im({NixString(Heap("")): Bool(false), NixString(Heap("\"𑃷{+🕴𐹷𐌉𞥞'🯃ⶢvPw")): Bool(false), NixString(Heap("#z1j B\u{9d7}BUQÉ\"𞹎%-𑵣Შ")): Path("𐆠az𐮯\u{c56}�ㄘ&𞄕ନz[zdஐ�%𐕛*ȺDዻ{𞊭꠱:."), NixString(Smol("&?")): Float(-1.47353330827237e-166), NixString(Smol("&🡹B$ѨK-/<4JvѨȺn?\u{11369}𐠈D-%େ/=ਹ𛂔")): Float(-1.2013756386823606e-129), NixString(Heap("'")): Float(-0.0), NixString(Heap(".`%+ᩣ\"HÎ&\"𐊸A%ἚO🅂<𞺦¥ைEAh.⥘?𑩦")): Integer(-5195668090573806811), NixString(Heap("4l\"𖫭¥r&𐡈{\u{11c9b}&磌લ𐀷𑊣Â\"'𓄋{`?¬𝔔")): Attrs(NixAttrs(Im({NixString(Heap("\"!`=Wj㆐ᬢ\\è\u{1183a}T\u{11046}ꬕ&ȺȺ")): Float(-8.138682035627315e228), NixString(Smol("$E𑱼ஃ:\"ெB🮮.�🡾🕴:𑍇𛲜")): String(NixString(Smol("Qf1𝜻A\\L'?U"))), NixString(Heap("$kส𐀽ச𐠸<`\u{f37}5ቘ\u{cc6}G\"1ລÕ𑙓ἡמּt/𖭒𝓨₃ÑѨ𑤨a𖿰ѨA")): Integer(-8744311468850207194), NixString(Smol("%'?Xl(ࡻ.C+T𝒿𘴈L-𑌳\\\u{1cd1}bK>SA")): Path("\\🕴એѨ𐹼-﹔Ð𘴂:?{\u{1e02a}¥\u{c46}#𚿾?K"), NixString(Heap("%e🕴v𐢕&:שׂத")): String(NixString(Heap(""))), NixString(Smol("%¥\u{1d167} |M")): String(NixString(Heap("𒌶$O🁹/𑑝ò"))), 
NixString(Heap("&=?\u{ec9}Ⱥuい𑜿Ѩ/�𝼦\"$𑤷T{?ⶾ,𑋲9Ⱥ�")): Null, NixString(Smol("&𑌖𐎀?`F𑍍g¥`\"Ⱥ𖿢ລ𝼦L{\u{b01}ѨO*.&K%🫳\"🕴f𑆓¥/")): Null, NixString(Smol("'P𖮆𐽀𐓲𝔊SⶄᰎE\\kF𑦣`�ఎG&/K*")): Path("ᠦlѨꬮ𞸷:\u{1344f}ዀME.&\u{aff}𐧣ᡋb𐦟ቘᄨ"), NixString(Heap("'౻🕴_𐠷𑍡!')?&🕴.\\{{𐾵�*>sj\u{9e3}న`Ѩ¤")): Bool(true), NixString(Smol("+ས&1:᳇P%{r?Ѩ/d`𐠷\u{1ac5}>'🢧")): Bool(true), NixString(Smol("3\\Ѩ龜=&පౝ𑈋🢱\"/<.&🕴/ૠᥬ ̄|&Â$'")): Bool(false), NixString(Heap("6|ዅ`ժy𐠃*")): Bool(false), NixString(Smol("8=᠊<ඩTRળ(Q𑝆=෨\\?Ϳ{>n&")): Null, NixString(Heap(":6zꬑ\\फ़\u{1cf15}װ𝼦🛩Lv<?\u{10a3a}*𑌂wೝ𝓃u%C.R%$෪l")): Bool(true), NixString(Smol("<V🛷VȺ𐩈𓈤ౝ&ₔѨ")): Integer(-2521102349508766000), NixString(Heap("=/e�S=d𐖪\\bᘔ8$Z'")): Float(1.7754451890487876e-308), NixString(Smol("D𑊓Ꮅ%")): String(NixString(Heap("𑌏ë;ල"))), NixString(Heap("E</")): Float(1.74088780910557e-309), NixString(Heap("Fༀ.൷𒀖>¹𐍮Ⱥ$M\\\u{10a3f}ಎ𒿮Ⴭ5ዓ6{🕴:")): Null, NixString(Heap("GCક\\¥{4🕴?ࠏ🕴=🕴𖭝'?𐝥%Ⱥ\u{10f48}�kኲ:%¥")): Path("𝉅w𞹟`ῼ`Ѩh/\u{11d3d}\u{65e}j/🉥\\&𛅥אּѨ𑧒\"Ѩkໄ\\a~𑚩-(:"), NixString(Smol("K|*`\u{9e2}Ѩ𖠉%𑦺=u</ৌ⮕ꬋ\u{a01}*6𐩒P᰿𞅎'🉁")): Bool(false), NixString(Smol("MSᅳ")): Integer(-8585013260819116073), NixString(Smol("P&ꩁ࡞🩠&𑈳:⮲.ز")): String(NixString(Heap("G]Ѩ{D"))), NixString(Heap("R𝒞j:𝄁{𑍝ﹰ\u{309a}5Ѩyべk🟰𑊈𱁿:𑑡'🉥ⷊ{ౠ<=&:ᦗ*_K")): Float(1.215573811363203e25), NixString(Heap("Xoy_Z🕴𞅈ⷈ⁵\"ݭ)<𑽎]𒒴")): Float(-2.0196288842285875e215), NixString(Heap("[y𞹨/")): Path("(p\'/.Ⱥࡹ?🄎ቌm�>>%z~{`%4ѨȺ𑫎"), NixString(Smol("[𝒻W𑬀\")a𛄲𑰄&ⶹ.\":𖮂")): Path("\u{a51}<:ꫧ㈘🕴𑒻gȺ𑌃D🮦𫝓"), NixString(Heap("^?𔕃{\"ꢭ𝍥{💿o𝼨\u{b01}Ѩõ")): Bool(true), NixString(Smol("`.\u{1e005}&Ⱥ🕴=%𑩦৩\u{a3c}{ⷜ:F�h'\":Ὓ࠸")): Integer(958752561685496671), NixString(Smol("`𑤓(b𝔵3=𞁕<\u{f93}𐇞𖭶$🕴¥.:&?=oఋN\u{9c3}")): Float(-1.2016436878109123e-90), NixString(Heap("`🂣`𐤢.üI¥::.$)𐨵/\\𒑚ZὍ𖹕e𝒟具Yຈ|🀙࡞")): Path("/ᅵȺ𝒽Ⴭ<🉀u)🕴מּ/ந"), NixString(Smol("d®댓7M𑍐_‽sxⶋ𖩈Xⶭ]ⷓ?`o𞥟\"@,ㄛ𞲔ೠ🕴j#y{'")): Path("ቝg𑅇ዀ🕴𐌼�t=:.|*]�🕴,Ѩ*ᝡㄧ¥nȺ"), NixString(Heap("eቊഎȺﹰ¾0𐣰/அ🠄r~=𐞌_ఽ")): Integer(-4346221131161118847), NixString(Smol("e𐠪ቘ<$=Q\u{ecc}𚿷*}ଐ$/𐂢Y?y\u{11d3d}fଐ𐋹\u{20ed}H\\�û'g<L{")): Integer(1302899345904266282), NixString(Heap("fJ🉀!6$F𑰤𐎁C&ౚ𞓤T\"\u{d81}«Տ𘴃.𑰍Â﬩*")): Float(-9.550597053049143e239), NixString(Heap("j﮼\"🕴$%'Gૌ3?\u{302d}¥")): Bool(false), NixString(Heap("u~Ⱥ¥𝣤YN𐂜 ¥' `r𐿄/\u{b57}")): Integer(3809742241627978303), NixString(Smol("z%c流ü🕴$`.F*`Ѩଏ<\"𣅅'<3p=r:Y\u{1a60}ড:/]𐾁")): String(NixString(Smol("\\*E■{:🕴"))), NixString(Heap("{<B9-Y𑣊N61ѨH.¡\\ꮬ'Ãkô")): Path("=𑥘^*¥K𚿾સ*Ðᝰf[:P𐤅*<⻡<\"8"), NixString(Smol("{ຄ±𑼉Ѩ`ⶮ𚿺Í%𞹡`!/")): Integer(-7455997128197210401), NixString(Smol("|🕴ේ0\u{1e131}🡑=ਜ਼_`𝔗.ો1ㄈ𐹣\u{1773}ꭆⵯ\u{1c2e}ࠨ𐧔੫=\u{b42}1\u{bd7}�/�¥$")): Null, NixString(Heap("¥⵰ᅬ<*'Z¥\"Ὓ$ᥖᅭ𚿱\"ꥳÒ𖿱𞓕Ѩ9MÌ¥u{ୈ<﹨")): Float(6.4374351060383694e243), NixString(Smol("¥🕴=r`z&.<V<.Ⱥ๏\"")): Bool(true), NixString(Smol("Ê9𝐐/JkᡶUl\"🕴{i")): Path("z/¥&Wzꡭ`?Ѩ\\🟰þ၈$ࠓ"), NixString(Heap("Ù𑥘c\"�\u{8ce}𑤸𝆥೮?~")): Bool(true), NixString(Heap("ãU&𐔐/")): Bool(false), NixString(Heap("ѨѨ?<'?࿎ȺՉⷊ$\u{fe09}ꞙ")): String(NixString(Smol("Ό/`j🕴🕴𐞂🕴\u{ce3}ਲ$𘟛ºװ?𞸮J𝍣Q\"}🕴q𐧸ࢺ\u{a48}"))), NixString(Heap("ࡩ꯳%^ౝ")): String(NixString(Smol("{%"))), NixString(Heap("\u{b57}🪃ⷚȺMC?¥\u{11c9a}+:*<B𐖰y~/.1&$𐺭hর\"\"")): Path("ቕ𝌫=(|R{ei\u{1a5b}Z!2\u{c3c}+<rí:\"f"), NixString(Heap("ங¸4ᨔ\u{17cb}ਜ'ಣ¥z&=='c𜽫ࡠﹳ꠲𐨖এ<¥🕴𑙫𐖷")): Integer(-2404377207820357643), NixString(Heap("ቘ🕴{ȺL'.")): Null, NixString(Smol("ኸ\"𖦆d\\&𐞷C🠀k1\u{eb1}KV%🪿\u{1cf35}>:3𐡾P.\"Ѩ.ໆኍ*'")): Bool(true), NixString(Heap("ኸ8")): String(NixString(Smol("�?Ia🀛*\u{10d27}U¥𐖙9ીㇺIW:%&G+?"))), NixString(Heap("ᱰῙ:ஙᤄ.:*𐺰𝄿勉Ѩ&¥𐧾7&`AຂѨ𑗒&¥𛅕/🕴Ѩ")): Float(-0.0), NixString(Heap("ꝏs𞊡<g=𝕃ᅭRȺ�u𛱜")): Bool(false), NixString(Heap("ꡩ$𰻍🯷@a\\")): 
Null, NixString(Smol("𐭻𞸧Ã\\Ⱥ𞸢𘌧%h+|@ꖦ*\"~𑄣")): Bool(false), NixString(Smol("𑐑𞟣Ѩ<''¥Ѩ<ອ᪦\"፵𛄲𞀾ឮo")): Float(1.540295382708916e-173), NixString(Heap("\u{1145e}p\u{ac1}e\u{10a05}𝼩#ந*/?🡗\u{c4d}ͼᲿꟑ")): Integer(-4194082747744625061), NixString(Smol("𑘏𞟨:Ѩ?^ c🃊tๆ.Ⱥ9 𔑳&៵ׯ🬔\u{11f01}𬢪*ﷲȺ`𑼆n")): Path("7�$Ì\u{1e00c}海2&𐝀<"), NixString(Smol("𑚆")): String(NixString(Smol("𝒢\u{11727};க\u{c56}P'𝄎🪬*ⷍ\u{1e08f}59/𑨙𐤿ොZ$Jෳ*𞺓"))), NixString(Heap("𑴋¥#=%")): Integer(1639298533614063138), NixString(Heap("\u{11d3d}𑊌o:}=ਫ਼$໓ᅨѨ𑌅6🫄ᅮ🛴")): Integer(4745566200697725742), NixString(Heap("\u{16af4}𞲡🡕:ೈ𝒩$𝄀\u{c3e}ࡨ")): Float(2.8652739787522095e-21), NixString(Heap("𖽩$,®'w5ޤ*𖿣H'6¥ੀ")): Path("+ꡕ:/f"), NixString(Heap("𞋿@%\u{20d6}÷MqȺÏ🪧𞟣&𝼆Ⱥn2?ᦽ")): Path("n𞺣Ꮝ*𐤤FU.T"), NixString(Smol("🕴/Ⱥᜄ&{.\u{e0109}]𐣨|ຆ᱓𐃯🂥𐺭𑤬@$r{Ⱥ‿🕴〧\u{c3e}'X")): Null, NixString(Smol("🕴<V𖬑u\u{bd7}ໆC`xῴ𐄂f.M^ਐ𑼂;%.𐠈¥໔ꘄዀѨઑ")): Null, NixString(Heap("🕴¥\u{10a38}/{\\-?🕴'Ýὓ𛱱`XuᾺ𐊎ࠅ|𞹒𐆗𑌲")): Bool(true)}))), NixString(Heap("9;G'.%𑴆")): String(NixString(Heap("🕴8?$=𞺒%V*\u{9bc}𑊄ଡ଼JbѨ=ળ=𑛉\u{a81}É;ᅤᲿN=ਫ਼:�`𐁙"))), NixString(Smol(":Ѩ\\Lꝅ5𐤀'uꦙ~*肋J")): List(NixList([String(NixString(Smol(".࡞\\P?*<<\"𐖄9&𘚯%Ѩю1G\\౭𒑃&𞹛=9:𞥐@("))), Path("<𝒗ঽ¥<\u{fa7}ୌD={=ଡ଼\"Ⱥ\u{bd7}ઐ$\'�¥𞹼E𐦙🡓t`\u{11357}"), Float(-3.099020840259708e-255), Path("𞟫᪓h𖭁")])), NixString(Heap("<:^X𞹇𞹷\\\u{f7e}\u{1713}^ⴝQ`.|G𝂜𞹍𑐬𐮅{x𑍇סּ\"`🉤𝚚𰳳Ⱥ")): Integer(6011461020988750685), NixString(Smol("<{פּ%#¥🕴d:�,y")): Float(-2.901654271651391e201), NixString(Heap("=/I\"🮧{z&a�<?`L¥𞋿``.|")): String(NixString(Smol("=𖫉m\"ȺhC_"))), NixString(Smol("=L=X'aਿ9.")): Path("<&]Mভ"), NixString(Smol("Eנּ:b$?JὙs𝒢O?ꥠѨ{\\ਸ𝕎𒾮`𐀢")): Float(3.6870021161947895e-16), NixString(Heap("\\�' ìrᩆ#ລB¥𐖻l%𛅥D3{/.4O¥\\.{𐼓^𒾬")): Path("/íR›d🕴-K\u{ac8}"), NixString(Heap("\\\u{1e028}೫I/ͽ4")): Float(-8.628193252502098e81), NixString(Heap("a𐠈/K33/&𝐵¥𐼰.y/🫢\\𐧨𞟹")): String(NixString(Heap("𞹡W<᱂a=ᡞಇ�%&𑴈ࢳ¥𑊈"))), NixString(Smol("eD�¥𐌋x<`B𞣍H\\`èZ𐐅៥{\\?T*")): Bool(false), NixString(Heap("oP𝒬(ѨJਈ𛀺Ѩ\"?T=O𛄲s�🕴LK🝈ከⶩ+\u{1344f}hu?E")): Bool(false), NixString(Heap("vNꟓȺM]")): Float(-2.9637346989743125e232), NixString(Smol("{!¥\\\u{a47}\u{10a38}.E4Ѩ;=R\u{a48}<=/\\&స𐢩N'?.9𑗊\u{7ec}ਸ਼:𐫃")): Null, NixString(Smol("~+!$M.𑐯ౚ\\>.¥𐖥<Q'໙0.kQ{𞹾༺𑼉sEὕ")): Float(9.45765137266644e-294), NixString(Smol("¥;Z$*&+�&\"🉆`ኲ")): Null, NixString(Heap("ãF&<` \u{64c}-:Ѩ*�")): Integer(-7654340132753689736), NixString(Heap("Ⱥ`\u{a81}Ⱥ𞺥\u{11727}<m\u{a51}{$`\u{11d3d}શR/E")): Bool(true), NixString(Heap("Ѩ%gὋ 4𑶠𑤕Y{Q<")): String(NixString(Heap("𛃛Ꟑꩀ𑊈RoR<ৌ:4`ⶥ(ೳ>8*𑌟/𛅧<}&꒩᠙\""))), NixString(Heap("ѨನQ𑠃%=i𘓦ශ\u{1939}𞟭7;ﺸ𐼗ꟘW")): Bool(true), NixString(Smol("ਬͿ𝒩./🪡Ѩ𝁾ໆ+ఫJ{𑰏*\u{cd6}ΐѨ'APȺ*")): Path("Q%︹֏Ὓ<$Ꟗ$ᦣ𐖻Ѩ\u{c3c}MѨ"), NixString(Heap("ଢ଼")): List(NixList([Path("?&�=\u{fe0a}z\"વ🟰}ৱᦥ𐕅\"𞹭MⷝDJຄ"), String(NixString(Smol("𐺱`𞱸Ⱥ🜛?ୈ&Ꞣ=j¥`൷@ਐෂ't=7)ꬖన2Dꪩl𐝣ጓ=W"))), Path("`ಲ=𑘀eલ7𖽧Ù&ᜉ:.ﺴ𐳂\'𞹎n🕴𓊚>𝓓ை:\'{`o"), Null, Bool(true), Float(-1.424807473222878e-261), String(NixString(Heap("𑤅w𑌏ኸ¡ਊ/𞺩Ⱥ\\𛅤𞢊?\""))), Null, Bool(true), Float(-4.714901519361897e-299), Null, Integer(2676153683650725840), Null, Integer(3879649205909941200), Bool(false), Integer(-7874695792262285476), String(NixString(Heap("Wᥳ⑁*\\%ᜦ.⮏꫞Q𞹔L|ௌѨdU=*Ѩ\u{20dc}"))), Null, String(NixString(Smol("U𑐎𖺗$𞹡𞅏𝔯﮴<)&𑌫\\{\u{1acd}wѨ!𞸤𖭓º"))), Integer(802198132362652319), Path(":\u{b4d}Fᝊ:w:꯵"), Integer(-8241314039419932440), Null, Bool(true), Float(-0.0), Float(0.0), Integer(-3815417798906879402), Path("\"Dෆૌ෪\u{b01}ૐ%M№"), Null, Path("ab`𐒨\\{÷௶𖮌$=ë"), Bool(true), Null, Float(2.607265033718189e-240), String(NixString(Smol("𐖤𑤷=6$`:0ѨE\"Ѩ\"𗾮ኴ}.𞠸𞺢ߔ𑵡"))), Integer(340535348291582986), 
String(NixString(Heap("᠑%*&t𐮮':\\\"ꩂລ'\"𞟳|J\\\\V𑧈𛇝췭𐮬$\u{eb4}"))), Null, Float(-0.0), Float(2.533509218227985e-50), Integer(2424692299527350019), Integer(8550372276678005182), Integer(2463774675297034756), Float(-1.5273858905127126e203), String(NixString(Smol("𐕐𐐀ùI"))), String(NixString(Heap("?:🕴𑴞ﹲѨË𐫬Zf{𝋍{{࿀&\"Ô:<CJN?"))), Integer(-916756719790576181), Float(5.300552697992164e116), Path("𞹝�.?𝌼uଢ଼%~"), Bool(true), Float(-4.423451855615858e107), String(NixString(Smol("Ώﮱ¥8gH^ᛨꟖjR𐢮\"S𖩑𞸻🪁"))), Integer(8503745651802746605), Integer(8360793923494146338), Bool(false), Path("𝄞3¨/𑢸mº~𑍋ha𐫮$⹎\u{dd6}*ᲬѨク?𝔔¿"), Null, Float(-1.116670032902463e-188)])), NixString(Smol("ഌ⾞V𑄾7𑚅D𞅎$\"")): String(NixString(Heap("¤<ꡀὝ¥𞸤𞸇𑴄�ⷍa𑁟O\"f&𘴇`*<z𑨧ᤗ"))), NixString(Smol("ዄS𐾆ꟓ\")ኛ7G<oѨ🕴?𛄲×𑿕ᩣb。𖩒w")): Float(-2.1782297015402654e-308), NixString(Heap("ᩌਲ਼Ⱥﹲ<'=ւລמּ�p")): Path("𐀽𐠄"), NixString(Smol("\u{1a7b}$༄XୈÂ`7\"$*=࡞៣𐀸.tw")): String(NixString(Smol("ﬖ<j%𑊊kૐmȺrᎇ𒄹ëল.y*ᤫCョB½"))), NixString(Smol("Ὸ:𝍷🢰8g>\u{fe0d}RѨꥫ^ῷ&ຈ/'𑌅gው=s𝂀'�𑛂𞴍~𑅮?𝑪")): Float(1.6480784640549027e-202), NixString(Smol("ⴭ8🕴V𞹙2𑶦ං=")): String(NixString(Heap("ਓ𞹗Ѩ𖭨𝓼¥..ᾶ`oU{ₖB\\©:/ѨI𝆍.&🕴:ම𞹉fi*v"))), NixString(Smol("יּN\"@𐦽Ë`𑇑ȺC{¥b$ಭೱp$hS[좵&")): Float(0.0), NixString(Smol("ﺵ🟰\"nj𞺡o&*Oz{Ⱥ\"/\\፨\u{8dd}^v´𞹺`")): Path("𑇞.🕂d"), NixString(Smol("�𞸹מּz?_R'ᥲ<<¥/*﹛Ⱥ�R𑗓𑙥𑜄ೱa'𐀞7")): Float(1.6499558125107454e233), NixString(Heap("𐏊എ🟩ﬔV?Ѩ𝔢𖭡\u{1bc9d}𝌅𑤸ڽ𞹺V.𝕊B⸛*")): Null, NixString(Heap("𐞢u1%<🕴â}𛱷$٭b𛲟ຜv着\"𝕃\\$Ⱥ<ºףּ%ଓ")): Null, NixString(Heap("𐞶𐏃*$🂭eස\u{dd6};𞁝'ᩯ")): String(NixString(Heap("'&ⷘᰎ\"𐠸𑈪"))), NixString(Heap("\u{10a05}/")): String(NixString(Heap("ㄴኲY¥'𑛅Ῥ\"𔕑𐖕נּ🇽$l.=$🠤𛰜-𝒢R{$'$^"))), NixString(Heap("\u{11300}🃆+$�ₜj\"ჇA*r&𑥐𑊙W\u{10a0d}yR<\\Ⱥ׳𞄽C&?")): Integer(6886651566886381061), NixString(Heap("𑍌🕴`'\\G¥<ꬤⶮM%𑵠꒦ૉS\u{9d7}Ⴭඹ8🕴*[Ö")): Float(2.2707656538220278e250), NixString(Smol("𘴀'>$¦C/`M*𞻰¢꩙᠖�*🕴:&è")): String(NixString(Smol("'<ড়ᰦ"))), NixString(Smol("𞊭")): Attrs(NixAttrs(Empty)), NixString(Heap("𞥖🢙=ཏ=�%𛲁$zi¥&꧗𫝒૱G𐧠𝓾$/𐼍𫟨Yhⷕ")): String(NixString(Smol("Us:)?𘘼"))), NixString(Smol("𞹪*𑌳`f𝐋ଐ𝕆j")): String(NixString(Heap("𖭿5^Zq𐨗𑄿𑥄ߤ𑜱2ঐ"))), NixString(Smol("𱾼\u{1e4ef}õ<\u{a0}{ö\\$�ῷq")): Null}))), y: Attrs(NixAttrs(KV { name: String(NixString(Smol("�𛲑ો\\?É'{<?W2𫳫p%"))), value: Integer(134481456438872098) })), z: Attrs(NixAttrs(Empty)) } diff --git a/tvix/eval/src/builtins/impure.rs b/tvix/eval/src/builtins/impure.rs new file mode 100644 index 000000000000..adf43e1a2ea9 --- /dev/null +++ b/tvix/eval/src/builtins/impure.rs @@ -0,0 +1,81 @@ +use builtin_macros::builtins; +use genawaiter::rc::Gen; + +use std::{ + env, + time::{SystemTime, UNIX_EPOCH}, +}; + +use crate::{ + errors::ErrorKind, + io::FileType, + value::NixAttrs, + vm::generators::{self, GenCo}, + NixString, Value, +}; + +#[builtins] +mod impure_builtins { + use super::*; + use crate::builtins::coerce_value_to_path; + + #[builtin("getEnv")] + async fn builtin_get_env(co: GenCo, var: Value) -> Result<Value, ErrorKind> { + Ok(env::var(var.to_str()?).unwrap_or_else(|_| "".into()).into()) + } + + #[builtin("pathExists")] + async fn builtin_path_exists(co: GenCo, path: Value) -> Result<Value, ErrorKind> { + let path = coerce_value_to_path(&co, path).await?; + Ok(generators::request_path_exists(&co, path).await) + } + + #[builtin("readDir")] + async fn builtin_read_dir(co: GenCo, path: Value) -> Result<Value, ErrorKind> { + let path = coerce_value_to_path(&co, path).await?; + + let dir = generators::request_read_dir(&co, path).await; + let res = dir.into_iter().map(|(name, ftype)| { + ( + NixString::from(name.as_str()), + Value::String( + match 
ftype { + FileType::Directory => "directory", + FileType::Regular => "regular", + FileType::Symlink => "symlink", + FileType::Unknown => "unknown", + } + .into(), + ), + ) + }); + + Ok(Value::attrs(NixAttrs::from_iter(res))) + } + + #[builtin("readFile")] + async fn builtin_read_file(co: GenCo, path: Value) -> Result<Value, ErrorKind> { + let path = coerce_value_to_path(&co, path).await?; + Ok(generators::request_read_to_string(&co, path).await) + } +} + +/// Return all impure builtins, that is all builtins which may perform I/O +/// outside of the VM and so cannot be used in all contexts (e.g. WASM). +pub fn impure_builtins() -> Vec<(&'static str, Value)> { + let mut result = impure_builtins::builtins(); + + // currentTime pins the time at which evaluation was started + { + let seconds = match SystemTime::now().duration_since(UNIX_EPOCH) { + Ok(dur) => dur.as_secs() as i64, + + // This case is hit if the system time is *before* epoch. + Err(err) => -(err.duration().as_secs() as i64), + }; + + result.push(("currentTime", Value::Integer(seconds))); + } + + result +} diff --git a/tvix/eval/src/builtins/mod.rs b/tvix/eval/src/builtins/mod.rs new file mode 100644 index 000000000000..53ad6f3f8e50 --- /dev/null +++ b/tvix/eval/src/builtins/mod.rs @@ -0,0 +1,1037 @@ +//! This module implements the builtins exposed in the Nix language. +//! +//! See //tvix/eval/docs/builtins.md for a some context on the +//! available builtins in Nix. + +use builtin_macros::builtins; +use genawaiter::rc::Gen; +use regex::Regex; +use std::cmp::{self, Ordering}; +use std::collections::VecDeque; +use std::collections::{BTreeMap, HashSet}; +use std::path::PathBuf; + +use crate::arithmetic_op; +use crate::value::PointerEquality; +use crate::vm::generators::{self, GenCo}; +use crate::warnings::WarningKind; +use crate::{ + errors::ErrorKind, + value::{CoercionKind, NixAttrs, NixList, NixString, SharedThunkSet, Thunk, Value}, +}; + +use self::versions::{VersionPart, VersionPartsIter}; + +mod to_xml; +mod versions; + +#[cfg(feature = "impure")] +mod impure; + +#[cfg(feature = "impure")] +pub use impure::impure_builtins; + +// we set TVIX_CURRENT_SYSTEM in build.rs +pub const CURRENT_PLATFORM: &str = env!("TVIX_CURRENT_SYSTEM"); + +/// Coerce a Nix Value to a plain path, e.g. in order to access the +/// file it points to via either `builtins.toPath` or an impure +/// builtin. This coercion can _never_ be performed in a Nix program +/// without using builtins (i.e. the trick `path: /. + path` to +/// convert from a string to a path wouldn't hit this code). 
+pub async fn coerce_value_to_path(co: &GenCo, v: Value) -> Result<PathBuf, ErrorKind> { + let value = generators::request_force(co, v).await; + if let Value::Path(p) = value { + return Ok(*p); + } + + let vs = generators::request_string_coerce(co, value, CoercionKind::Weak).await; + let path = PathBuf::from(vs.as_str()); + if path.is_absolute() { + Ok(path) + } else { + Err(ErrorKind::NotAnAbsolutePath(path)) + } +} + +#[builtins] +mod pure_builtins { + use crate::value::PointerEquality; + + use super::*; + + #[builtin("abort")] + async fn builtin_abort(co: GenCo, message: Value) -> Result<Value, ErrorKind> { + Err(ErrorKind::Abort(message.to_str()?.to_string())) + } + + #[builtin("add")] + async fn builtin_add(co: GenCo, x: Value, y: Value) -> Result<Value, ErrorKind> { + arithmetic_op!(&x, &y, +) + } + + #[builtin("all")] + async fn builtin_all(co: GenCo, pred: Value, list: Value) -> Result<Value, ErrorKind> { + for value in list.to_list()?.into_iter() { + let pred_result = generators::request_call_with(&co, pred.clone(), [value]).await; + let pred_result = generators::request_force(&co, pred_result).await; + + if !pred_result.as_bool()? { + return Ok(Value::Bool(false)); + } + } + + Ok(Value::Bool(true)) + } + + #[builtin("any")] + async fn builtin_any(co: GenCo, pred: Value, list: Value) -> Result<Value, ErrorKind> { + for value in list.to_list()?.into_iter() { + let pred_result = generators::request_call_with(&co, pred.clone(), [value]).await; + let pred_result = generators::request_force(&co, pred_result).await; + + if pred_result.as_bool()? { + return Ok(Value::Bool(true)); + } + } + + Ok(Value::Bool(false)) + } + + #[builtin("attrNames")] + async fn builtin_attr_names(co: GenCo, set: Value) -> Result<Value, ErrorKind> { + let xs = set.to_attrs()?; + let mut output = Vec::with_capacity(xs.len()); + + for (key, _val) in xs.iter() { + output.push(Value::String(key.clone())); + } + + Ok(Value::List(NixList::construct(output.len(), output))) + } + + #[builtin("attrValues")] + async fn builtin_attr_values(co: GenCo, set: Value) -> Result<Value, ErrorKind> { + let xs = set.to_attrs()?; + let mut output = Vec::with_capacity(xs.len()); + + for (_key, val) in xs.iter() { + output.push(val.clone()); + } + + Ok(Value::List(NixList::construct(output.len(), output))) + } + + #[builtin("baseNameOf")] + async fn builtin_base_name_of(co: GenCo, s: Value) -> Result<Value, ErrorKind> { + let s = s.coerce_to_string(co, CoercionKind::Weak).await?.to_str()?; + let result: String = s.rsplit_once('/').map(|(_, x)| x).unwrap_or(&s).into(); + Ok(result.into()) + } + + #[builtin("bitAnd")] + async fn builtin_bit_and(co: GenCo, x: Value, y: Value) -> Result<Value, ErrorKind> { + Ok(Value::Integer(x.as_int()? & y.as_int()?)) + } + + #[builtin("bitOr")] + async fn builtin_bit_or(co: GenCo, x: Value, y: Value) -> Result<Value, ErrorKind> { + Ok(Value::Integer(x.as_int()? | y.as_int()?)) + } + + #[builtin("bitXor")] + async fn builtin_bit_xor(co: GenCo, x: Value, y: Value) -> Result<Value, ErrorKind> { + Ok(Value::Integer(x.as_int()? 
^ y.as_int()?)) + } + + #[builtin("catAttrs")] + async fn builtin_cat_attrs(co: GenCo, key: Value, list: Value) -> Result<Value, ErrorKind> { + let key = key.to_str()?; + let list = list.to_list()?; + let mut output = vec![]; + + for item in list.into_iter() { + let set = generators::request_force(&co, item).await.to_attrs()?; + + if let Some(value) = set.select(key.as_str()) { + output.push(value.clone()); + } + } + + Ok(Value::List(NixList::construct(output.len(), output))) + } + + #[builtin("ceil")] + async fn builtin_ceil(co: GenCo, double: Value) -> Result<Value, ErrorKind> { + Ok(Value::Integer(double.as_float()?.ceil() as i64)) + } + + #[builtin("compareVersions")] + async fn builtin_compare_versions(co: GenCo, x: Value, y: Value) -> Result<Value, ErrorKind> { + let s1 = x.to_str()?; + let s1 = VersionPartsIter::new_for_cmp(s1.as_str()); + let s2 = y.to_str()?; + let s2 = VersionPartsIter::new_for_cmp(s2.as_str()); + + match s1.cmp(s2) { + std::cmp::Ordering::Less => Ok(Value::Integer(-1)), + std::cmp::Ordering::Equal => Ok(Value::Integer(0)), + std::cmp::Ordering::Greater => Ok(Value::Integer(1)), + } + } + + #[builtin("concatLists")] + async fn builtin_concat_lists(co: GenCo, lists: Value) -> Result<Value, ErrorKind> { + let mut out = imbl::Vector::new(); + + for value in lists.to_list()? { + let list = generators::request_force(&co, value).await.to_list()?; + out.extend(list.into_iter()); + } + + Ok(Value::List(out.into())) + } + + #[builtin("concatMap")] + async fn builtin_concat_map(co: GenCo, f: Value, list: Value) -> Result<Value, ErrorKind> { + let list = list.to_list()?; + let mut res = imbl::Vector::new(); + for val in list { + let out = generators::request_call_with(&co, f.clone(), [val]).await; + let out = generators::request_force(&co, out).await; + res.extend(out.to_list()?); + } + Ok(Value::List(res.into())) + } + + #[builtin("concatStringsSep")] + async fn builtin_concat_strings_sep( + co: GenCo, + separator: Value, + list: Value, + ) -> Result<Value, ErrorKind> { + let separator = separator.to_str()?; + let list = list.to_list()?; + let mut res = String::new(); + for (i, val) in list.into_iter().enumerate() { + if i != 0 { + res.push_str(&separator); + } + let s = generators::request_string_coerce(&co, val, CoercionKind::Weak).await; + res.push_str(s.as_str()); + } + Ok(res.into()) + } + + #[builtin("deepSeq")] + async fn builtin_deep_seq(co: GenCo, x: Value, y: Value) -> Result<Value, ErrorKind> { + generators::request_deep_force(&co, x, SharedThunkSet::default()).await; + Ok(y) + } + + #[builtin("div")] + async fn builtin_div(co: GenCo, x: Value, y: Value) -> Result<Value, ErrorKind> { + arithmetic_op!(&x, &y, /) + } + + #[builtin("dirOf")] + async fn builtin_dir_of(co: GenCo, s: Value) -> Result<Value, ErrorKind> { + let is_path = s.is_path(); + let str = s.coerce_to_string(co, CoercionKind::Weak).await?.to_str()?; + let result = str + .rsplit_once('/') + .map(|(x, _)| match x { + "" => "/", + _ => x, + }) + .unwrap_or("."); + if is_path { + Ok(Value::Path(Box::new(result.into()))) + } else { + Ok(result.into()) + } + } + + #[builtin("elem")] + async fn builtin_elem(co: GenCo, x: Value, xs: Value) -> Result<Value, ErrorKind> { + for val in xs.to_list()? { + if generators::check_equality(&co, x.clone(), val, PointerEquality::AllowAll).await? 
{ + return Ok(true.into()); + } + } + Ok(false.into()) + } + + #[builtin("elemAt")] + async fn builtin_elem_at(co: GenCo, xs: Value, i: Value) -> Result<Value, ErrorKind> { + let xs = xs.to_list()?; + let i = i.as_int()?; + if i < 0 { + Err(ErrorKind::IndexOutOfBounds { index: i }) + } else { + match xs.get(i as usize) { + Some(x) => Ok(x.clone()), + None => Err(ErrorKind::IndexOutOfBounds { index: i }), + } + } + } + + #[builtin("filter")] + async fn builtin_filter(co: GenCo, pred: Value, list: Value) -> Result<Value, ErrorKind> { + let list: NixList = list.to_list()?; + let mut out = imbl::Vector::new(); + + for value in list { + let result = generators::request_call_with(&co, pred.clone(), [value.clone()]).await; + + if generators::request_force(&co, result).await.as_bool()? { + out.push_back(value); + } + } + + Ok(Value::List(out.into())) + } + + #[builtin("floor")] + async fn builtin_floor(co: GenCo, double: Value) -> Result<Value, ErrorKind> { + Ok(Value::Integer(double.as_float()?.floor() as i64)) + } + + #[builtin("foldl'")] + async fn builtin_foldl( + co: GenCo, + op: Value, + #[lazy] nul: Value, + list: Value, + ) -> Result<Value, ErrorKind> { + let mut nul = nul; + let list = list.to_list()?; + for val in list { + // Every call of `op` is forced immediately, but `nul` is not, see + // https://github.com/NixOS/nix/blob/940e9eb8/src/libexpr/primops.cc#L3069-L3070C36 + // and our tests for foldl'. + nul = generators::request_call_with(&co, op.clone(), [nul, val]).await; + nul = generators::request_force(&co, nul).await; + } + + Ok(nul) + } + + #[builtin("functionArgs")] + async fn builtin_function_args(co: GenCo, f: Value) -> Result<Value, ErrorKind> { + let lambda = &f.as_closure()?.lambda(); + let formals = if let Some(formals) = &lambda.formals { + formals + } else { + return Ok(Value::attrs(NixAttrs::empty())); + }; + Ok(Value::attrs(NixAttrs::from_iter( + formals.arguments.iter().map(|(k, v)| (k.clone(), (*v))), + ))) + } + + #[builtin("fromJSON")] + async fn builtin_from_json(co: GenCo, json: Value) -> Result<Value, ErrorKind> { + let json_str = json.to_str()?; + + serde_json::from_str(&json_str).map_err(|err| err.into()) + } + + #[builtin("toJSON")] + async fn builtin_to_json(co: GenCo, val: Value) -> Result<Value, ErrorKind> { + let json_value = val.to_json(&co).await?; + let json_str = serde_json::to_string(&json_value)?; + Ok(json_str.into()) + } + + #[builtin("fromTOML")] + async fn builtin_from_toml(co: GenCo, toml: Value) -> Result<Value, ErrorKind> { + let toml_str = toml.to_str()?; + + toml::from_str(&toml_str).map_err(|err| err.into()) + } + + #[builtin("genericClosure")] + async fn builtin_generic_closure(co: GenCo, input: Value) -> Result<Value, ErrorKind> { + let attrs = input.to_attrs()?; + + // The work set is maintained as a VecDeque because new items + // are popped from the front. + let mut work_set: VecDeque<Value> = + generators::request_force(&co, attrs.select_required("startSet")?.clone()) + .await + .to_list()? + .into_iter() + .collect(); + + let operator = attrs.select_required("operator")?; + + let mut res = imbl::Vector::new(); + let mut done_keys: Vec<Value> = vec![]; + + while let Some(val) = work_set.pop_front() { + let val = generators::request_force(&co, val).await; + let attrs = val.to_attrs()?; + let key = attrs.select_required("key")?; + + if !bgc_insert_key(&co, key.clone(), &mut done_keys).await? 
{ + continue; + } + + res.push_back(val.clone()); + + let op_result = generators::request_force( + &co, + generators::request_call_with(&co, operator.clone(), [val]).await, + ) + .await; + + work_set.extend(op_result.to_list()?.into_iter()); + } + + Ok(Value::List(NixList::from(res))) + } + + #[builtin("genList")] + async fn builtin_gen_list( + co: GenCo, + generator: Value, + length: Value, + ) -> Result<Value, ErrorKind> { + let mut out = imbl::Vector::<Value>::new(); + let len = length.as_int()?; + // the best span we can get… + let span = generators::request_span(&co).await; + + for i in 0..len { + let val = Value::Thunk(Thunk::new_suspended_call( + generator.clone(), + i.into(), + span.clone(), + )); + out.push_back(val); + } + + Ok(Value::List(out.into())) + } + + #[builtin("getAttr")] + async fn builtin_get_attr(co: GenCo, key: Value, set: Value) -> Result<Value, ErrorKind> { + let k = key.to_str()?; + let xs = set.to_attrs()?; + + match xs.select(k.as_str()) { + Some(x) => Ok(x.clone()), + None => Err(ErrorKind::AttributeNotFound { + name: k.to_string(), + }), + } + } + + #[builtin("groupBy")] + async fn builtin_group_by(co: GenCo, f: Value, list: Value) -> Result<Value, ErrorKind> { + let mut res: BTreeMap<NixString, imbl::Vector<Value>> = BTreeMap::new(); + for val in list.to_list()? { + let key = generators::request_force( + &co, + generators::request_call_with(&co, f.clone(), [val.clone()]).await, + ) + .await + .to_str()?; + + res.entry(key) + .or_insert_with(imbl::Vector::new) + .push_back(val); + } + Ok(Value::attrs(NixAttrs::from_iter( + res.into_iter() + .map(|(k, v)| (k, Value::List(NixList::from(v)))), + ))) + } + + #[builtin("hasAttr")] + async fn builtin_has_attr(co: GenCo, key: Value, set: Value) -> Result<Value, ErrorKind> { + let k = key.to_str()?; + let xs = set.to_attrs()?; + + Ok(Value::Bool(xs.contains(k.as_str()))) + } + + #[builtin("head")] + async fn builtin_head(co: GenCo, list: Value) -> Result<Value, ErrorKind> { + match list.to_list()?.get(0) { + Some(x) => Ok(x.clone()), + None => Err(ErrorKind::IndexOutOfBounds { index: 0 }), + } + } + + #[builtin("intersectAttrs")] + async fn builtin_intersect_attrs(co: GenCo, x: Value, y: Value) -> Result<Value, ErrorKind> { + let attrs1 = x.to_attrs()?; + let attrs2 = y.to_attrs()?; + let res = attrs2.iter().filter_map(|(k, v)| { + if attrs1.contains(k) { + Some((k.clone(), v.clone())) + } else { + None + } + }); + Ok(Value::attrs(NixAttrs::from_iter(res))) + } + + #[builtin("isAttrs")] + async fn builtin_is_attrs(co: GenCo, value: Value) -> Result<Value, ErrorKind> { + Ok(Value::Bool(matches!(value, Value::Attrs(_)))) + } + + #[builtin("isBool")] + async fn builtin_is_bool(co: GenCo, value: Value) -> Result<Value, ErrorKind> { + Ok(Value::Bool(matches!(value, Value::Bool(_)))) + } + + #[builtin("isFloat")] + async fn builtin_is_float(co: GenCo, value: Value) -> Result<Value, ErrorKind> { + Ok(Value::Bool(matches!(value, Value::Float(_)))) + } + + #[builtin("isFunction")] + async fn builtin_is_function(co: GenCo, value: Value) -> Result<Value, ErrorKind> { + Ok(Value::Bool(matches!( + value, + Value::Closure(_) | Value::Builtin(_) + ))) + } + + #[builtin("isInt")] + async fn builtin_is_int(co: GenCo, value: Value) -> Result<Value, ErrorKind> { + Ok(Value::Bool(matches!(value, Value::Integer(_)))) + } + + #[builtin("isList")] + async fn builtin_is_list(co: GenCo, value: Value) -> Result<Value, ErrorKind> { + Ok(Value::Bool(matches!(value, Value::List(_)))) + } + + #[builtin("isNull")] + async fn builtin_is_null(co: 
GenCo, value: Value) -> Result<Value, ErrorKind> { + Ok(Value::Bool(matches!(value, Value::Null))) + } + + #[builtin("isPath")] + async fn builtin_is_path(co: GenCo, value: Value) -> Result<Value, ErrorKind> { + Ok(Value::Bool(matches!(value, Value::Path(_)))) + } + + #[builtin("isString")] + async fn builtin_is_string(co: GenCo, value: Value) -> Result<Value, ErrorKind> { + Ok(Value::Bool(matches!(value, Value::String(_)))) + } + + #[builtin("length")] + async fn builtin_length(co: GenCo, list: Value) -> Result<Value, ErrorKind> { + Ok(Value::Integer(list.to_list()?.len() as i64)) + } + + #[builtin("lessThan")] + async fn builtin_less_than(co: GenCo, x: Value, y: Value) -> Result<Value, ErrorKind> { + Ok(Value::Bool(matches!( + x.nix_cmp_ordering(y, co).await?, + Some(Ordering::Less) + ))) + } + + #[builtin("listToAttrs")] + async fn builtin_list_to_attrs(co: GenCo, list: Value) -> Result<Value, ErrorKind> { + let list = list.to_list()?; + let mut map = BTreeMap::new(); + for val in list { + let attrs = generators::request_force(&co, val).await.to_attrs()?; + let name = generators::request_force(&co, attrs.select_required("name")?.clone()) + .await + .to_str()?; + let value = attrs.select_required("value")?.clone(); + // Map entries earlier in the list take precedence over entries later in the list + map.entry(name).or_insert(value); + } + Ok(Value::attrs(NixAttrs::from_iter(map.into_iter()))) + } + + #[builtin("map")] + async fn builtin_map(co: GenCo, f: Value, list: Value) -> Result<Value, ErrorKind> { + let mut out = imbl::Vector::<Value>::new(); + + // the best span we can get… + let span = generators::request_span(&co).await; + + for val in list.to_list()? { + let result = Value::Thunk(Thunk::new_suspended_call(f.clone(), val, span.clone())); + out.push_back(result) + } + + Ok(Value::List(out.into())) + } + + #[builtin("mapAttrs")] + async fn builtin_map_attrs(co: GenCo, f: Value, attrs: Value) -> Result<Value, ErrorKind> { + let attrs = attrs.to_attrs()?; + let mut out = imbl::OrdMap::new(); + + // the best span we can get… + let span = generators::request_span(&co).await; + + for (key, value) in attrs.into_iter() { + let result = Value::Thunk(Thunk::new_suspended_call( + f.clone(), + key.clone().into(), + span.clone(), + )); + let result = Value::Thunk(Thunk::new_suspended_call(result, value, span.clone())); + + out.insert(key, result); + } + + Ok(Value::attrs(out.into())) + } + + #[builtin("match")] + async fn builtin_match(co: GenCo, regex: Value, str: Value) -> Result<Value, ErrorKind> { + let s = str.to_str()?; + let re = regex.to_str()?; + let re: Regex = Regex::new(&format!("^{}$", re.as_str())).unwrap(); + match re.captures(&s) { + Some(caps) => Ok(Value::List( + caps.iter() + .skip(1) + .map(|grp| grp.map(|g| Value::from(g.as_str())).unwrap_or(Value::Null)) + .collect::<imbl::Vector<Value>>() + .into(), + )), + None => Ok(Value::Null), + } + } + + #[builtin("mul")] + async fn builtin_mul(co: GenCo, x: Value, y: Value) -> Result<Value, ErrorKind> { + arithmetic_op!(&x, &y, *) + } + + #[builtin("parseDrvName")] + async fn builtin_parse_drv_name(co: GenCo, s: Value) -> Result<Value, ErrorKind> { + // This replicates cppnix's (mis?)handling of codepoints + // above U+007f following 0x2d ('-') + let s = s.to_str()?; + let slice: &[u8] = s.as_str().as_ref(); + let (name, dash_and_version) = slice.split_at( + slice + .windows(2) + .enumerate() + .find_map(|x| match x { + (idx, [b'-', c1]) if !c1.is_ascii_alphabetic() => Some(idx), + _ => None, + }) + .unwrap_or(slice.len()), + ); 
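// For example, for "nix-0.12pre12876" the first window matching
// [b'-', non-alphabetic] starts at index 3, so the slice is split into
// name = "nix" and dash_and_version = "-0.12pre12876".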
+ let version = dash_and_version + .split_first() + .map(|x| core::str::from_utf8(x.1)) + .unwrap_or(Ok(""))?; + Ok(Value::attrs(NixAttrs::from_iter( + [("name", core::str::from_utf8(name)?), ("version", version)].into_iter(), + ))) + } + + #[builtin("partition")] + async fn builtin_partition(co: GenCo, pred: Value, list: Value) -> Result<Value, ErrorKind> { + let mut right: imbl::Vector<Value> = Default::default(); + let mut wrong: imbl::Vector<Value> = Default::default(); + + let list: NixList = list.to_list()?; + for elem in list { + let result = generators::request_call_with(&co, pred.clone(), [elem.clone()]).await; + + if generators::request_force(&co, result).await.as_bool()? { + right.push_back(elem); + } else { + wrong.push_back(elem); + }; + } + + let res = [ + ("right", Value::List(NixList::from(right))), + ("wrong", Value::List(NixList::from(wrong))), + ]; + + Ok(Value::attrs(NixAttrs::from_iter(res.into_iter()))) + } + + #[builtin("removeAttrs")] + async fn builtin_remove_attrs( + co: GenCo, + attrs: Value, + keys: Value, + ) -> Result<Value, ErrorKind> { + let attrs = attrs.to_attrs()?; + let keys = keys + .to_list()? + .into_iter() + .map(|v| v.to_str()) + .collect::<Result<HashSet<_>, _>>()?; + let res = attrs.iter().filter_map(|(k, v)| { + if !keys.contains(k) { + Some((k.clone(), v.clone())) + } else { + None + } + }); + Ok(Value::attrs(NixAttrs::from_iter(res))) + } + + #[builtin("replaceStrings")] + async fn builtin_replace_strings( + co: GenCo, + from: Value, + to: Value, + s: Value, + ) -> Result<Value, ErrorKind> { + let from = from.to_list()?; + for val in &from { + generators::request_force(&co, val.clone()).await; + } + + let to = to.to_list()?; + for val in &to { + generators::request_force(&co, val.clone()).await; + } + + let string = s.to_str()?; + + let mut res = String::new(); + + let mut i: usize = 0; + let mut empty_string_replace = false; + + // This can't be implemented using Rust's string.replace() as + // well as a map because we need to handle errors with results + // as well as "reset" the iterator to zero for the replacement + // everytime there's a successful match. + // Also, Rust's string.replace allocates a new string + // on every call which is not preferable. + 'outer: while i < string.len() { + // Try a match in all the from strings + for elem in std::iter::zip(from.iter(), to.iter()) { + let from = elem.0.to_str()?; + let to = elem.1.to_str()?; + + if i + from.len() >= string.len() { + continue; + } + + // We already applied a from->to with an empty from + // transformation. 
+ // Let's skip it so that we don't loop infinitely + if empty_string_replace && from.as_str().is_empty() { + continue; + } + + // if we match the `from` string, let's replace + if &string[i..i + from.len()] == from.as_str() { + res += &to; + i += from.len(); + + // remember if we applied the empty from->to + empty_string_replace = from.as_str().is_empty(); + + continue 'outer; + } + } + + // If we don't match any `from`, we simply add a character + res += &string[i..i + 1]; + i += 1; + + // Since we didn't apply anything transformation, + // we reset the empty string replacement + empty_string_replace = false; + } + + // Special case when the string is empty or at the string's end + // and one of the from is also empty + for elem in std::iter::zip(from.iter(), to.iter()) { + let from = elem.0.to_str()?; + let to = elem.1.to_str()?; + + if from.as_str().is_empty() { + res += &to; + break; + } + } + Ok(Value::String(res.into())) + } + + #[builtin("seq")] + async fn builtin_seq(co: GenCo, _x: Value, y: Value) -> Result<Value, ErrorKind> { + // The builtin calling infra has already forced both args for us, so + // we just return the second and ignore the first + Ok(y) + } + + #[builtin("split")] + async fn builtin_split(co: GenCo, regex: Value, str: Value) -> Result<Value, ErrorKind> { + let s = str.to_str()?; + let text = s.as_str(); + let re = regex.to_str()?; + let re: Regex = Regex::new(re.as_str()).unwrap(); + let mut capture_locations = re.capture_locations(); + let num_captures = capture_locations.len(); + let mut ret = imbl::Vector::new(); + let mut pos = 0; + + while let Some(thematch) = re.captures_read_at(&mut capture_locations, text, pos) { + // push the unmatched characters preceding the match + ret.push_back(Value::from(&text[pos..thematch.start()])); + + // Push a list with one element for each capture + // group in the regex, containing the characters + // matched by that capture group, or null if no match. + // We skip capture 0; it represents the whole match. 
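// For example:
//   builtins.split "(a)b" "abc"   => [ "" [ "a" ] "c" ]
//   builtins.split "([ac])" "abc" => [ "" [ "a" ] "b" [ "c" ] "" ]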
+ let v: imbl::Vector<Value> = (1..num_captures) + .map(|i| capture_locations.get(i)) + .map(|o| { + o.map(|(start, end)| Value::from(&text[start..end])) + .unwrap_or(Value::Null) + }) + .collect(); + ret.push_back(Value::List(NixList::from(v))); + pos = thematch.end(); + } + + // push the unmatched characters following the last match + ret.push_back(Value::from(&text[pos..])); + + Ok(Value::List(NixList::from(ret))) + } + + #[builtin("sort")] + async fn builtin_sort(co: GenCo, comparator: Value, list: Value) -> Result<Value, ErrorKind> { + let list = list.to_list()?; + let sorted = list.sort_by(&co, comparator).await?; + Ok(Value::List(sorted)) + } + + #[builtin("splitVersion")] + async fn builtin_split_version(co: GenCo, s: Value) -> Result<Value, ErrorKind> { + let s = s.to_str()?; + let s = VersionPartsIter::new(s.as_str()); + + let parts = s + .map(|s| { + Value::String(match s { + VersionPart::Number(n) => n.into(), + VersionPart::Word(w) => w.into(), + }) + }) + .collect::<Vec<Value>>(); + Ok(Value::List(NixList::construct(parts.len(), parts))) + } + + #[builtin("stringLength")] + async fn builtin_string_length(co: GenCo, #[lazy] s: Value) -> Result<Value, ErrorKind> { + // also forces the value + let s = s.coerce_to_string(co, CoercionKind::Weak).await?; + Ok(Value::Integer(s.to_str()?.as_str().len() as i64)) + } + + #[builtin("sub")] + async fn builtin_sub(co: GenCo, x: Value, y: Value) -> Result<Value, ErrorKind> { + arithmetic_op!(&x, &y, -) + } + + #[builtin("substring")] + async fn builtin_substring( + co: GenCo, + start: Value, + len: Value, + s: Value, + ) -> Result<Value, ErrorKind> { + let beg = start.as_int()?; + let len = len.as_int()?; + let x = s.coerce_to_string(co, CoercionKind::Weak).await?.to_str()?; + + if beg < 0 { + return Err(ErrorKind::IndexOutOfBounds { index: beg }); + } + let beg = beg as usize; + + // Nix doesn't assert that the length argument is + // non-negative when the starting index is GTE the + // string's length. 
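// For example, substring 5 (-1) "foo" evaluates to "" via the early
// return below, while substring 0 (-1) "foo" fails with NegativeLength,
// because the sign of `len` is only checked once `beg` falls inside the
// string.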
+ if beg >= x.as_str().len() { + return Ok(Value::String("".into())); + } + + if len < 0 { + return Err(ErrorKind::NegativeLength { length: len }); + } + + let len = len as usize; + let end = cmp::min(beg + len, x.as_str().len()); + + Ok(Value::String(x.as_str()[beg..end].into())) + } + + #[builtin("tail")] + async fn builtin_tail(co: GenCo, list: Value) -> Result<Value, ErrorKind> { + let xs = list.to_list()?; + + if xs.is_empty() { + Err(ErrorKind::TailEmptyList) + } else { + let output = xs.into_iter().skip(1).collect::<Vec<_>>(); + Ok(Value::List(NixList::construct(output.len(), output))) + } + } + + #[builtin("throw")] + async fn builtin_throw(co: GenCo, message: Value) -> Result<Value, ErrorKind> { + Err(ErrorKind::Throw(message.to_str()?.to_string())) + } + + #[builtin("toString")] + async fn builtin_to_string(co: GenCo, #[lazy] x: Value) -> Result<Value, ErrorKind> { + // coerce_to_string forces for us + x.coerce_to_string(co, CoercionKind::Strong).await + } + + #[builtin("toXML")] + async fn builtin_to_xml(co: GenCo, value: Value) -> Result<Value, ErrorKind> { + let value = generators::request_deep_force(&co, value, SharedThunkSet::default()).await; + let mut buf: Vec<u8> = vec![]; + to_xml::value_to_xml(&mut buf, &value)?; + Ok(String::from_utf8(buf)?.into()) + } + + #[builtin("placeholder")] + async fn builtin_placeholder(co: GenCo, #[lazy] _x: Value) -> Result<Value, ErrorKind> { + generators::emit_warning_kind(&co, WarningKind::NotImplemented("builtins.placeholder")) + .await; + Ok("<builtins.placeholder-is-not-implemented-in-tvix-yet>".into()) + } + + #[builtin("trace")] + async fn builtin_trace(co: GenCo, message: Value, value: Value) -> Result<Value, ErrorKind> { + // TODO(grfn): `trace` should be pluggable and capturable, probably via a method on + // the VM + eprintln!("trace: {} :: {}", message, message.type_of()); + Ok(value) + } + + #[builtin("toPath")] + async fn builtin_to_path(co: GenCo, s: Value) -> Result<Value, ErrorKind> { + let path: Value = crate::value::canon_path(coerce_value_to_path(&co, s).await?).into(); + Ok(path.coerce_to_string(co, CoercionKind::Weak).await?) + } + + #[builtin("tryEval")] + async fn builtin_try_eval(co: GenCo, #[lazy] e: Value) -> Result<Value, ErrorKind> { + let res = match generators::request_try_force(&co, e).await { + Some(value) => [("value", value), ("success", true.into())], + None => [("value", false.into()), ("success", false.into())], + }; + + Ok(Value::attrs(NixAttrs::from_iter(res.into_iter()))) + } + + #[builtin("typeOf")] + async fn builtin_type_of(co: GenCo, x: Value) -> Result<Value, ErrorKind> { + Ok(Value::String(x.type_of().into())) + } +} + +/// Internal helper function for genericClosure, determining whether a +/// value has been seen before. +async fn bgc_insert_key(co: &GenCo, key: Value, done: &mut Vec<Value>) -> Result<bool, ErrorKind> { + for existing in done.iter() { + if generators::check_equality( + co, + existing.clone(), + key.clone(), + // TODO(tazjin): not actually sure which semantics apply here + PointerEquality::ForbidAll, + ) + .await? + { + return Ok(false); + } + } + + done.push(key); + Ok(true) +} + +/// The set of standard pure builtins in Nix, mostly concerned with +/// data structure manipulation (string, attrs, list, etc. functions). 
+pub fn pure_builtins() -> Vec<(&'static str, Value)> { + let mut result = pure_builtins::builtins(); + + // Pure-value builtins + result.push(("nixVersion", Value::String("2.3-compat-tvix-0.1".into()))); + result.push(("langVersion", Value::Integer(6))); + result.push(("null", Value::Null)); + result.push(("true", Value::Bool(true))); + result.push(("false", Value::Bool(false))); + + result.push(( + "currentSystem", + crate::systems::llvm_triple_to_nix_double(CURRENT_PLATFORM).into(), + )); + + result +} + +#[builtins] +mod placeholder_builtins { + use super::*; + + #[builtin("unsafeDiscardStringContext")] + async fn builtin_unsafe_discard_string_context( + _: GenCo, + #[lazy] s: Value, + ) -> Result<Value, ErrorKind> { + // Tvix does not manually track contexts, and this is a no-op for us. + Ok(s) + } + + #[builtin("addErrorContext")] + async fn builtin_add_error_context( + co: GenCo, + #[lazy] _context: Value, + #[lazy] val: Value, + ) -> Result<Value, ErrorKind> { + generators::emit_warning_kind(&co, WarningKind::NotImplemented("builtins.addErrorContext")) + .await; + Ok(val) + } + + #[builtin("unsafeGetAttrPos")] + async fn builtin_unsafe_get_attr_pos( + co: GenCo, + _name: Value, + _attrset: Value, + ) -> Result<Value, ErrorKind> { + generators::emit_warning_kind( + &co, + WarningKind::NotImplemented("builtins.unsafeGetAttrsPos"), + ) + .await; + let res = [ + ("line", 42.into()), + ("col", 42.into()), + ("file", Value::Path(Box::new("/deep/thought".into()))), + ]; + Ok(Value::attrs(NixAttrs::from_iter(res.into_iter()))) + } +} + +pub fn placeholders() -> Vec<(&'static str, Value)> { + placeholder_builtins::builtins() +} diff --git a/tvix/eval/src/builtins/to_xml.rs b/tvix/eval/src/builtins/to_xml.rs new file mode 100644 index 000000000000..375785b7a63e --- /dev/null +++ b/tvix/eval/src/builtins/to_xml.rs @@ -0,0 +1,139 @@ +//! This module implements `builtins.toXML`, which is a serialisation +//! of value information as well as internal tvix state that several +//! things in nixpkgs rely on. + +use std::{io::Write, rc::Rc}; +use xml::writer::events::XmlEvent; +use xml::writer::EmitterConfig; +use xml::writer::EventWriter; + +use crate::{ErrorKind, Value}; + +/// Recursively serialise a value to XML. The value *must* have been +/// deep-forced before being passed to this function. +pub(super) fn value_to_xml<W: Write>(mut writer: W, value: &Value) -> Result<(), ErrorKind> { + let config = EmitterConfig { + perform_indent: true, + pad_self_closing: true, + + // Nix uses single-quotes *only* in the document declaration, + // so we need to write it manually. + write_document_declaration: false, + ..Default::default() + }; + + // Write a literal document declaration, using C++-Nix-style + // single quotes. + writeln!(writer, "<?xml version='1.0' encoding='utf-8'?>")?; + + let mut writer = EventWriter::new_with_config(writer, config); + + writer.write(XmlEvent::start_element("expr"))?; + value_variant_to_xml(&mut writer, value)?; + writer.write(XmlEvent::end_element())?; + + // Unwrap the writer to add the final newline that C++ Nix adds. 
+ writeln!(writer.into_inner())?; + + Ok(()) +} + +fn write_typed_value<W: Write, V: ToString>( + w: &mut EventWriter<W>, + name: &str, + value: V, +) -> Result<(), ErrorKind> { + w.write(XmlEvent::start_element(name).attr("value", &value.to_string()))?; + w.write(XmlEvent::end_element())?; + Ok(()) +} + +fn value_variant_to_xml<W: Write>(w: &mut EventWriter<W>, value: &Value) -> Result<(), ErrorKind> { + match value { + Value::Thunk(t) => return value_variant_to_xml(w, &t.value()), + + Value::Null => { + w.write(XmlEvent::start_element("null"))?; + w.write(XmlEvent::end_element()) + } + + Value::Bool(b) => return write_typed_value(w, "bool", b), + Value::Integer(i) => return write_typed_value(w, "int", i), + Value::Float(f) => return write_typed_value(w, "float", f), + Value::String(s) => return write_typed_value(w, "string", s.as_str()), + Value::Path(p) => return write_typed_value(w, "path", p.to_string_lossy()), + + Value::List(list) => { + w.write(XmlEvent::start_element("list"))?; + + for elem in list.into_iter() { + value_variant_to_xml(w, elem)?; + } + + w.write(XmlEvent::end_element()) + } + + Value::Attrs(attrs) => { + w.write(XmlEvent::start_element("attrs"))?; + + for elem in attrs.iter() { + w.write(XmlEvent::start_element("attr").attr("name", elem.0.as_str()))?; + value_variant_to_xml(w, elem.1)?; + w.write(XmlEvent::end_element())?; + } + + w.write(XmlEvent::end_element()) + } + + Value::Closure(c) => { + w.write(XmlEvent::start_element("function"))?; + + match &c.lambda.formals { + Some(formals) => { + if formals.ellipsis { + w.write(XmlEvent::start_element("attrspat").attr("ellipsis", "1"))?; + w.write(XmlEvent::end_element())?; + } + + for arg in formals.arguments.iter() { + w.write(XmlEvent::start_element("attr").attr("name", arg.0.as_str()))?; + w.write(XmlEvent::end_element())?; + } + } + None => { + // TODO(tazjin): tvix does not currently persist function + // argument names anywhere (whereas we do for formals, as + // that is required for other runtime behaviour). Because of + // this the implementation here is fake, always returning + // the same argument name. + // + // If we don't want to persist the data, we can re-parse the + // AST from the spans of the lambda's bytecode and figure it + // out that way, but it needs some investigating. + w.write(XmlEvent::start_element("varpat").attr("name", /* fake: */ "x"))?; + w.write(XmlEvent::end_element())?; + } + } + + w.write(XmlEvent::end_element()) + } + + Value::Builtin(_) => { + w.write(XmlEvent::start_element("unevaluated"))?; + w.write(XmlEvent::end_element()) + } + + Value::AttrNotFound + | Value::Blueprint(_) + | Value::DeferredUpvalue(_) + | Value::UnresolvedPath(_) + | Value::Json(_) => { + return Err(ErrorKind::TvixBug { + msg: "internal value variant encountered in builtins.toXML", + metadata: Some(Rc::new(value.clone())), + }) + } + }?; + + Ok(()) +} diff --git a/tvix/eval/src/builtins/versions.rs b/tvix/eval/src/builtins/versions.rs new file mode 100644 index 000000000000..79fb82b868fb --- /dev/null +++ b/tvix/eval/src/builtins/versions.rs @@ -0,0 +1,155 @@ +use std::cmp::Ordering; +use std::iter::{once, Chain, Once}; +use std::ops::RangeInclusive; + +/// Version strings can be broken up into Parts. +/// One Part represents either a string of digits or characters. +/// '.' and '_' represent deviders between parts and are not included in any part. 
+#[derive(PartialEq, Eq, Clone, Debug)] +pub enum VersionPart<'a> { + Word(&'a str), + Number(&'a str), +} + +impl PartialOrd for VersionPart<'_> { + fn partial_cmp(&self, other: &Self) -> Option<Ordering> { + Some(self.cmp(other)) + } +} + +impl Ord for VersionPart<'_> { + fn cmp(&self, other: &Self) -> Ordering { + match (self, other) { + (VersionPart::Number(s1), VersionPart::Number(s2)) => { + // Note: C++ Nix uses `int`, but probably doesn't make a difference + // We trust that the splitting was done correctly and parsing will work + let n1: u64 = s1.parse().unwrap(); + let n2: u64 = s2.parse().unwrap(); + n1.cmp(&n2) + } + + // `pre` looses unless the other part is also a `pre` + (VersionPart::Word("pre"), VersionPart::Word("pre")) => Ordering::Equal, + (VersionPart::Word("pre"), _) => Ordering::Less, + (_, VersionPart::Word("pre")) => Ordering::Greater, + + // Number wins against Word + (VersionPart::Number(_), VersionPart::Word(_)) => Ordering::Greater, + (VersionPart::Word(_), VersionPart::Number(_)) => Ordering::Less, + + (VersionPart::Word(w1), VersionPart::Word(w2)) => w1.cmp(w2), + } + } +} + +/// Type used to hold information about a VersionPart during creation +enum InternalPart { + Number { range: RangeInclusive<usize> }, + Word { range: RangeInclusive<usize> }, + Break, +} + +/// An iterator which yields the parts of a version string. +/// +/// This can then be directly used to compare two versions +pub struct VersionPartsIter<'a> { + cached_part: InternalPart, + iter: std::str::CharIndices<'a>, + version: &'a str, +} + +impl<'a> VersionPartsIter<'a> { + pub fn new(version: &'a str) -> Self { + Self { + cached_part: InternalPart::Break, + iter: version.char_indices(), + version, + } + } + + /// Create an iterator that yields all version parts followed by an additional + /// `VersionPart::Word("")` part (i.e. you can think of this as + /// `builtins.splitVersion version ++ [ "" ]`). This is necessary, because + /// Nix's `compareVersions` is not entirely lexicographical: If we have two + /// equal versions, but one is longer, the longer one is only considered + /// greater if the first additional part of the longer version is not `pre`, + /// e.g. `2.3 > 2.3pre`. It is otherwise lexicographical, so peculiar behavior + /// like `2.3 < 2.3.0pre` ensues. Luckily for us, this means that we can + /// lexicographically compare two version strings, _if_ we append an extra + /// component to both versions. + pub fn new_for_cmp(version: &'a str) -> Chain<Self, Once<VersionPart>> { + Self::new(version).chain(once(VersionPart::Word(""))) + } +} + +impl<'a> Iterator for VersionPartsIter<'a> { + type Item = VersionPart<'a>; + + fn next(&mut self) -> Option<Self::Item> { + let char = self.iter.next(); + + if char.is_none() { + let cached_part = std::mem::replace(&mut self.cached_part, InternalPart::Break); + match cached_part { + InternalPart::Break => return None, + InternalPart::Number { range } => { + return Some(VersionPart::Number(&self.version[range])) + } + InternalPart::Word { range } => { + return Some(VersionPart::Word(&self.version[range])) + } + } + } + + let (pos, char) = char.unwrap(); + match char { + // Divider encountered + '.' 
| '-' => { + let cached_part = std::mem::replace(&mut self.cached_part, InternalPart::Break); + match cached_part { + InternalPart::Number { range } => { + Some(VersionPart::Number(&self.version[range])) + } + InternalPart::Word { range } => Some(VersionPart::Word(&self.version[range])), + InternalPart::Break => self.next(), + } + } + + // digit encountered + _ if char.is_ascii_digit() => { + let cached_part = std::mem::replace( + &mut self.cached_part, + InternalPart::Number { range: pos..=pos }, + ); + match cached_part { + InternalPart::Number { range } => { + self.cached_part = InternalPart::Number { + range: *range.start()..=*range.end() + 1, + }; + self.next() + } + InternalPart::Word { range } => Some(VersionPart::Word(&self.version[range])), + InternalPart::Break => self.next(), + } + } + + // char encountered + _ => { + let mut cached_part = InternalPart::Word { range: pos..=pos }; + std::mem::swap(&mut cached_part, &mut self.cached_part); + match cached_part { + InternalPart::Word { range } => { + self.cached_part = InternalPart::Word { + range: *range.start()..=*range.end() + char.len_utf8(), + }; + self.next() + } + InternalPart::Number { range } => { + Some(VersionPart::Number(&self.version[range])) + } + InternalPart::Break => self.next(), + } + } + } + } +} diff --git a/tvix/eval/src/chunk.rs b/tvix/eval/src/chunk.rs new file mode 100644 index 000000000000..9d35b30c64a6 --- /dev/null +++ b/tvix/eval/src/chunk.rs @@ -0,0 +1,279 @@ +use std::io::Write; +use std::ops::{Index, IndexMut}; + +use crate::opcode::{CodeIdx, ConstantIdx, OpCode}; +use crate::value::Value; +use crate::SourceCode; + +/// Represents a source location from which one or more operations +/// were compiled. +/// +/// The span itself is an index into a [codemap::Codemap], and the +/// structure tracks the number of operations that were yielded from +/// the same span. +/// +/// At error reporting time, it becomes possible to either just fetch +/// the textual representation of that span from the codemap, or to +/// even re-parse the AST using rnix to create more semantically +/// interesting errors. +#[derive(Clone, Debug, PartialEq)] +struct SourceSpan { + /// Span into the [codemap::Codemap]. + span: codemap::Span, + + /// Index of the first operation covered by this span. + start: usize, +} + +/// A chunk is a representation of a sequence of bytecode +/// instructions, associated constants and additional metadata as +/// emitted by the compiler. +#[derive(Debug, Default)] +pub struct Chunk { + pub code: Vec<OpCode>, + pub constants: Vec<Value>, + spans: Vec<SourceSpan>, +} + +impl Index<ConstantIdx> for Chunk { + type Output = Value; + + fn index(&self, index: ConstantIdx) -> &Self::Output { + &self.constants[index.0] + } +} + +impl Index<CodeIdx> for Chunk { + type Output = OpCode; + + fn index(&self, index: CodeIdx) -> &Self::Output { + &self.code[index.0] + } +} + +impl IndexMut<CodeIdx> for Chunk { + fn index_mut(&mut self, index: CodeIdx) -> &mut Self::Output { + &mut self.code[index.0] + } +} + +impl Chunk { + pub fn push_op(&mut self, data: OpCode, span: codemap::Span) -> CodeIdx { + let idx = self.code.len(); + self.code.push(data); + self.push_span(span, idx); + CodeIdx(idx) + } + + /// Get the first span of a chunk, no questions asked. + pub fn first_span(&self) -> codemap::Span { + self.spans[0].span + } + + /// Pop the last operation from the chunk and clean up its tracked + /// span. Used when the compiler backtracks. + pub fn pop_op(&mut self) { + // Simply drop the last op. 
+ self.code.pop(); + + if let Some(span) = self.spans.last() { + // If the last span started at this op, drop it. + if span.start == self.code.len() { + self.spans.pop(); + } + } + } + + pub fn push_constant(&mut self, data: Value) -> ConstantIdx { + let idx = self.constants.len(); + self.constants.push(data); + ConstantIdx(idx) + } + + // Span tracking implementation + + fn push_span(&mut self, span: codemap::Span, start: usize) { + match self.spans.last_mut() { + // We do not need to insert the same span again, as this + // instruction was compiled from the same span as the last + // one. + Some(last) if last.span == span => {} + + // In all other cases, this is a new source span. + _ => self.spans.push(SourceSpan { span, start }), + } + } + + /// Retrieve the [codemap::Span] from which the instruction at + /// `offset` was compiled. + pub fn get_span(&self, offset: CodeIdx) -> codemap::Span { + let position = self + .spans + .binary_search_by(|span| span.start.cmp(&offset.0)); + + let span = match position { + Ok(index) => &self.spans[index], + Err(index) => { + if index == 0 { + &self.spans[0] + } else { + &self.spans[index - 1] + } + } + }; + + span.span + } + + /// Write the disassembler representation of the operation at + /// `idx` to the specified writer. + pub fn disassemble_op<W: Write>( + &self, + writer: &mut W, + source: &SourceCode, + width: usize, + idx: CodeIdx, + ) -> Result<(), std::io::Error> { + write!(writer, "{:#width$x}\t ", idx.0, width = width)?; + + // Print continuation character if the previous operation was at + // the same line, otherwise print the line. + let line = source.get_line(self.get_span(idx)); + if idx.0 > 0 && source.get_line(self.get_span(CodeIdx(idx.0 - 1))) == line { + write!(writer, " |\t")?; + } else { + write!(writer, "{:4}\t", line)?; + } + + match self[idx] { + OpCode::OpConstant(idx) => { + let val_str = match &self[idx] { + Value::Thunk(t) => t.debug_repr(), + Value::Closure(c) => format!("closure({:p})", c.lambda), + val => format!("{}", val), + }; + + writeln!(writer, "OpConstant({}@{})", val_str, idx.0) + } + op => writeln!(writer, "{:?}", op), + }?; + + Ok(()) + } + + /// Extend this chunk with the content of another, moving out of the other + /// in the process. + /// + /// This is used by the compiler when it detects that it unnecessarily + /// thunked a nested expression. + pub fn extend(&mut self, other: Self) { + // Some operations need to be modified in certain ways before being + // valid as part of the new chunk. + let const_count = self.constants.len(); + for (idx, op) in other.code.iter().enumerate() { + let span = other.get_span(CodeIdx(idx)); + match op { + // As the constants shift, the index needs to be moved relatively. + OpCode::OpConstant(ConstantIdx(idx)) => { + self.push_op(OpCode::OpConstant(ConstantIdx(idx + const_count)), span) + } + + // Other operations either operate on relative offsets, or no + // offsets, and are safe to keep as-is. + _ => self.push_op(*op, span), + }; + } + + self.constants.extend(other.constants); + self.spans.extend(other.spans); + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::test_utils::dummy_span; + + // Note: These tests are about the functionality of the `Chunk` type, the + // opcodes used below do *not* represent valid, executable Tvix code (and + // don't need to). 
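// A possible additional test, sketched here with the same dummy-span setup
// as the tests below, covering the backtracking path of pop_op():
#[test]
fn push_and_pop_op() {
    let mut chunk = Chunk::default();
    chunk.push_op(OpCode::OpAdd, dummy_span());
    chunk.push_op(OpCode::OpSub, dummy_span());
    chunk.pop_op();

    // Only the first op should remain after backtracking.
    assert_eq!(chunk.code, vec![OpCode::OpAdd]);
}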
+ + #[test] + fn push_op() { + let mut chunk = Chunk::default(); + chunk.push_op(OpCode::OpAdd, dummy_span()); + assert_eq!(chunk.code.last().unwrap(), &OpCode::OpAdd); + } + + #[test] + fn extend_empty() { + let mut chunk = Chunk::default(); + chunk.push_op(OpCode::OpAdd, dummy_span()); + + let other = Chunk::default(); + chunk.extend(other); + + assert_eq!( + chunk.code, + vec![OpCode::OpAdd], + "code should not have changed" + ); + } + + #[test] + fn extend_simple() { + let span = dummy_span(); + let mut chunk = Chunk::default(); + chunk.push_op(OpCode::OpAdd, span); + + let mut other = Chunk::default(); + other.push_op(OpCode::OpSub, span); + other.push_op(OpCode::OpMul, span); + + let expected_code = vec![OpCode::OpAdd, OpCode::OpSub, OpCode::OpMul]; + + chunk.extend(other); + + assert_eq!(chunk.code, expected_code, "code should have been extended"); + } + + #[test] + fn extend_with_constant() { + let span = dummy_span(); + let mut chunk = Chunk::default(); + chunk.push_op(OpCode::OpAdd, span); + let cidx = chunk.push_constant(Value::Integer(0)); + assert_eq!( + cidx.0, 0, + "first constant in main chunk should have index 0" + ); + chunk.push_op(OpCode::OpConstant(cidx), span); + + let mut other = Chunk::default(); + other.push_op(OpCode::OpSub, span); + let other_cidx = other.push_constant(Value::Integer(1)); + assert_eq!( + other_cidx.0, 0, + "first constant in other chunk should have index 0" + ); + other.push_op(OpCode::OpConstant(other_cidx), span); + + chunk.extend(other); + + let expected_code = vec![ + OpCode::OpAdd, + OpCode::OpConstant(ConstantIdx(0)), + OpCode::OpSub, + OpCode::OpConstant(ConstantIdx(1)), // <- note: this was rewritten + ]; + + assert_eq!( + chunk.code, expected_code, + "code should have been extended and rewritten" + ); + + assert_eq!(chunk.constants.len(), 2); + assert!(matches!(chunk.constants[0], Value::Integer(0))); + assert!(matches!(chunk.constants[1], Value::Integer(1))); + } +} diff --git a/tvix/eval/src/compiler/bindings.rs b/tvix/eval/src/compiler/bindings.rs new file mode 100644 index 000000000000..a3d7c6fbfb33 --- /dev/null +++ b/tvix/eval/src/compiler/bindings.rs @@ -0,0 +1,826 @@ +//! This module implements compiler logic related to name/value binding +//! definitions (that is, attribute sets and let-expressions). +//! +//! In the case of recursive scopes these cases share almost all of their +//! (fairly complex) logic. + +use std::iter::Peekable; + +use rnix::ast::HasEntry; +use rowan::ast::AstChildren; + +use super::*; + +type PeekableAttrs = Peekable<AstChildren<ast::Attr>>; + +/// What kind of bindings scope is being compiled? +#[derive(Clone, Copy, PartialEq)] +enum BindingsKind { + /// Standard `let ... in ...`-expression. + LetIn, + + /// Non-recursive attribute set. + Attrs, + + /// Recursive attribute set. + RecAttrs, +} + +impl BindingsKind { + fn is_attrs(&self) -> bool { + matches!(self, BindingsKind::Attrs | BindingsKind::RecAttrs) + } +} + +// Internal representation of an attribute set used for merging sets, or +// inserting nested keys. +#[derive(Clone)] +struct AttributeSet { + /// Original span at which this set was first encountered. + span: Span, + + /// Tracks the kind of set (rec or not). 
+ kind: BindingsKind, + + /// All inherited entries + inherits: Vec<ast::Inherit>, + + /// All internal entries + entries: Vec<(Span, PeekableAttrs, ast::Expr)>, +} + +impl ToSpan for AttributeSet { + fn span_for(&self, _: &codemap::File) -> Span { + self.span + } +} + +impl AttributeSet { + fn from_ast(c: &Compiler, node: &ast::AttrSet) -> Self { + AttributeSet { + span: c.span_for(node), + + // Kind of the attrs depends on the first time it is + // encountered. We actually believe this to be a Nix + // bug: https://github.com/NixOS/nix/issues/7111 + kind: if node.rec_token().is_some() { + BindingsKind::RecAttrs + } else { + BindingsKind::Attrs + }, + + inherits: ast::HasEntry::inherits(node).collect(), + + entries: ast::HasEntry::attrpath_values(node) + .map(|entry| { + let span = c.span_for(&entry); + ( + span, + entry.attrpath().unwrap().attrs().peekable(), + entry.value().unwrap(), + ) + }) + .collect(), + } + } +} + +// Data structures to track the bindings observed in the second pass, and +// forward the information needed to compile their value. +enum Binding { + InheritFrom { + namespace: ast::Expr, + name: SmolStr, + span: Span, + }, + + Plain { + expr: ast::Expr, + }, + + Set(AttributeSet), +} + +impl Binding { + /// Merge the provided value into the current binding, or emit an + /// error if this turns out to be impossible. + fn merge( + &mut self, + c: &mut Compiler, + span: Span, + mut remaining_path: PeekableAttrs, + value: ast::Expr, + ) { + match self { + Binding::InheritFrom { name, ref span, .. } => { + c.emit_error(span, ErrorKind::UnmergeableInherit { name: name.clone() }) + } + + // If the value is not yet a nested binding, flip the representation + // and recurse. + Binding::Plain { expr } => match expr { + ast::Expr::AttrSet(existing) => { + let nested = AttributeSet::from_ast(c, existing); + *self = Binding::Set(nested); + self.merge(c, span, remaining_path, value); + } + + _ => c.emit_error(&value, ErrorKind::UnmergeableValue), + }, + + // If the value is nested further, it is simply inserted into the + // bindings with its full path and resolved recursively further + // down. + Binding::Set(existing) if remaining_path.peek().is_some() => { + existing.entries.push((span, remaining_path, value)) + } + + Binding::Set(existing) => { + if let ast::Expr::AttrSet(new) = value { + existing.inherits.extend(ast::HasEntry::inherits(&new)); + existing + .entries + .extend(ast::HasEntry::attrpath_values(&new).map(|entry| { + let span = c.span_for(&entry); + ( + span, + entry.attrpath().unwrap().attrs().peekable(), + entry.value().unwrap(), + ) + })); + } else { + // This branch is unreachable because in cases where the + // path is empty (i.e. there is no further nesting), the + // previous try_merge function already verified that the + // expression is an attribute set. + + // TODO(tazjin): Consider making this branch live by + // shuffling that check around and emitting a static error + // here instead of a runtime error. + unreachable!() + } + } + } + } +} + +enum KeySlot { + /// There is no key slot (`let`-expressions do not emit their key). + None { name: SmolStr }, + + /// The key is statically known and has a slot. + Static { slot: LocalIdx, name: SmolStr }, + + /// The key is dynamic, i.e. only known at runtime, and must be compiled + /// into its slot. + Dynamic { slot: LocalIdx, attr: ast::Attr }, +} + +struct TrackedBinding { + key_slot: KeySlot, + value_slot: LocalIdx, + binding: Binding, +} + +impl TrackedBinding { + /// Does this binding match the given key? 
+ /// + /// Used to determine which binding to merge another one into. + fn matches(&self, key: &str) -> bool { + match &self.key_slot { + KeySlot::None { name } => name == key, + KeySlot::Static { name, .. } => name == key, + KeySlot::Dynamic { .. } => false, + } + } +} + +struct TrackedBindings { + bindings: Vec<TrackedBinding>, +} + +impl TrackedBindings { + fn new() -> Self { + TrackedBindings { bindings: vec![] } + } + + /// Attempt to merge an entry into an existing matching binding, assuming + /// that the provided binding is mergable (i.e. either a nested key or an + /// attribute set literal). + /// + /// Returns true if the binding was merged, false if it needs to be compiled + /// separately as a new binding. + fn try_merge( + &mut self, + c: &mut Compiler, + span: Span, + name: &ast::Attr, + mut remaining_path: PeekableAttrs, + value: ast::Expr, + ) -> bool { + // If the path has no more entries, and if the entry is not an + // attribute set literal, the entry can not be merged. + if remaining_path.peek().is_none() && !matches!(value, ast::Expr::AttrSet(_)) { + return false; + } + + // If the first element of the path is not statically known, the entry + // can not be merged. + let name = match expr_static_attr_str(name) { + Some(name) => name, + None => return false, + }; + + // If there is no existing binding with this key, the entry can not be + // merged. + // TODO: benchmark whether using a map or something is useful over the + // `find` here + let binding = match self.bindings.iter_mut().find(|b| b.matches(&name)) { + Some(b) => b, + None => return false, + }; + + // No more excuses ... the binding can be merged! + binding.binding.merge(c, span, remaining_path, value); + + true + } + + /// Add a completely new binding to the tracked bindings. + fn track_new(&mut self, key_slot: KeySlot, value_slot: LocalIdx, binding: Binding) { + self.bindings.push(TrackedBinding { + key_slot, + value_slot, + binding, + }); + } +} + +/// Wrapper around the `ast::HasEntry` trait as that trait can not be +/// implemented for custom types. +trait HasEntryProxy { + fn inherits(&self) -> Box<dyn Iterator<Item = ast::Inherit>>; + + fn attributes( + &self, + file: Arc<codemap::File>, + ) -> Box<dyn Iterator<Item = (Span, PeekableAttrs, ast::Expr)>>; +} + +impl<N: HasEntry> HasEntryProxy for N { + fn inherits(&self) -> Box<dyn Iterator<Item = ast::Inherit>> { + Box::new(ast::HasEntry::inherits(self)) + } + + fn attributes( + &self, + file: Arc<codemap::File>, + ) -> Box<dyn Iterator<Item = (Span, PeekableAttrs, ast::Expr)>> { + Box::new(ast::HasEntry::attrpath_values(self).map(move |entry| { + ( + entry.span_for(&file), + entry.attrpath().unwrap().attrs().peekable(), + entry.value().unwrap(), + ) + })) + } +} + +impl HasEntryProxy for AttributeSet { + fn inherits(&self) -> Box<dyn Iterator<Item = ast::Inherit>> { + Box::new(self.inherits.clone().into_iter()) + } + + fn attributes( + &self, + _: Arc<codemap::File>, + ) -> Box<dyn Iterator<Item = (Span, PeekableAttrs, ast::Expr)>> { + Box::new(self.entries.clone().into_iter()) + } +} + +/// AST-traversing functions related to bindings. +impl Compiler<'_> { + /// Compile all inherits of a node with entries that do *not* have a + /// namespace to inherit from, and return the remaining ones that do. 
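// For example, in `{ inherit x; inherit (pkgs) y z; }` the binding for `x`
// is compiled and declared right here, while `y` and `z`, together with the
// `pkgs` expression, are returned to the caller as namespaced inherits.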
+ fn compile_plain_inherits<N>( + &mut self, + slot: LocalIdx, + kind: BindingsKind, + count: &mut usize, + node: &N, + ) -> Vec<(ast::Expr, SmolStr, Span)> + where + N: ToSpan + HasEntryProxy, + { + // Pass over all inherits, resolving only those without namespaces. + // Since they always resolve in a higher scope, we can just compile and + // declare them immediately. + // + // Inherits with namespaces are returned to the caller. + let mut inherit_froms: Vec<(ast::Expr, SmolStr, Span)> = vec![]; + + for inherit in node.inherits() { + if inherit.attrs().peekable().peek().is_none() { + self.emit_warning(&inherit, WarningKind::EmptyInherit); + continue; + } + + match inherit.from() { + // Within a `let` binding, inheriting from the outer scope is a + // no-op *if* there are no dynamic bindings. + None if !kind.is_attrs() && !self.has_dynamic_ancestor() => { + self.emit_warning(&inherit, WarningKind::UselessInherit); + continue; + } + + None => { + for attr in inherit.attrs() { + let name = match expr_static_attr_str(&attr) { + Some(name) => name, + None => { + self.emit_error(&attr, ErrorKind::DynamicKeyInScope("inherit")); + continue; + } + }; + + // If the identifier resolves statically in a `let`, it + // has precedence over dynamic bindings, and the inherit + // is useless. + if kind == BindingsKind::LetIn + && matches!( + self.scope_mut().resolve_local(&name), + LocalPosition::Known(_) + ) + { + self.emit_warning(&attr, WarningKind::UselessInherit); + continue; + } + + *count += 1; + + // Place key on the stack when compiling attribute sets. + if kind.is_attrs() { + self.emit_constant(Value::String(name.as_str().into()), &attr); + let span = self.span_for(&attr); + self.scope_mut().declare_phantom(span, true); + } + + // Place the value on the stack. Note that because plain + // inherits are always in the outer scope, the slot of + // *this* scope itself is used. + self.compile_identifier_access(slot, &name, &attr); + + // In non-recursive attribute sets, the key slot must be + // a phantom (i.e. the identifier can not be resolved in + // this scope). + let idx = if kind == BindingsKind::Attrs { + let span = self.span_for(&attr); + self.scope_mut().declare_phantom(span, false) + } else { + self.declare_local(&attr, name) + }; + + self.scope_mut().mark_initialised(idx); + } + } + + Some(from) => { + for attr in inherit.attrs() { + let name = match expr_static_attr_str(&attr) { + Some(name) => name, + None => { + self.emit_error(&attr, ErrorKind::DynamicKeyInScope("inherit")); + continue; + } + }; + + *count += 1; + inherit_froms.push((from.expr().unwrap(), name, self.span_for(&attr))); + } + } + } + } + + inherit_froms + } + + /// Declare all namespaced inherits, that is inherits which are inheriting + /// values from an attribute set. + /// + /// This only ensures that the locals stack is aware of the inherits, it + /// does not yet emit bytecode that places them on the stack. This is up to + /// the owner of the `bindings` vector, which this function will populate. + fn declare_namespaced_inherits( + &mut self, + kind: BindingsKind, + inherit_froms: Vec<(ast::Expr, SmolStr, Span)>, + bindings: &mut TrackedBindings, + ) { + for (from, name, span) in inherit_froms { + let key_slot = if kind.is_attrs() { + // In an attribute set, the keys themselves are placed on the + // stack but their stack slot is inaccessible (it is only + // consumed by `OpAttrs`). 
+ KeySlot::Static { + slot: self.scope_mut().declare_phantom(span, false), + name: name.clone(), + } + } else { + KeySlot::None { name: name.clone() } + }; + + let value_slot = match kind { + // In recursive scopes, the value needs to be accessible on the + // stack. + BindingsKind::LetIn | BindingsKind::RecAttrs => { + self.declare_local(&span, name.clone()) + } + + // In non-recursive attribute sets, the value is inaccessible + // (only consumed by `OpAttrs`). + BindingsKind::Attrs => self.scope_mut().declare_phantom(span, false), + }; + + bindings.track_new( + key_slot, + value_slot, + Binding::InheritFrom { + namespace: from, + name, + span, + }, + ); + } + } + + /// Declare all regular bindings (i.e. `key = value;`) in a bindings scope, + /// but do not yet compile their values. + fn declare_bindings<N>( + &mut self, + kind: BindingsKind, + count: &mut usize, + bindings: &mut TrackedBindings, + node: &N, + ) where + N: ToSpan + HasEntryProxy, + { + for (span, mut path, value) in node.attributes(self.file.clone()) { + let key = path.next().unwrap(); + + if bindings.try_merge(self, span, &key, path.clone(), value.clone()) { + // Binding is nested, or already exists and was merged, move on. + continue; + } + + *count += 1; + + let key_span = self.span_for(&key); + let key_slot = match expr_static_attr_str(&key) { + Some(name) if kind.is_attrs() => KeySlot::Static { + name, + slot: self.scope_mut().declare_phantom(key_span, false), + }, + + Some(name) => KeySlot::None { name }, + + None if kind.is_attrs() => KeySlot::Dynamic { + attr: key, + slot: self.scope_mut().declare_phantom(key_span, false), + }, + + None => { + self.emit_error(&key, ErrorKind::DynamicKeyInScope("let-expression")); + continue; + } + }; + + let value_slot = match kind { + BindingsKind::LetIn | BindingsKind::RecAttrs => match &key_slot { + // In recursive scopes, the value needs to be accessible on the + // stack if it is statically known + KeySlot::None { name } | KeySlot::Static { name, .. } => { + self.declare_local(&key_span, name.as_str()) + } + + // Dynamic values are never resolvable (as their names are + // of course only known at runtime). + // + // Note: This branch is unreachable in `let`-expressions. + KeySlot::Dynamic { .. } => self.scope_mut().declare_phantom(key_span, false), + }, + + // In non-recursive attribute sets, the value is inaccessible + // (only consumed by `OpAttrs`). + BindingsKind::Attrs => self.scope_mut().declare_phantom(key_span, false), + }; + + let binding = if path.peek().is_some() { + Binding::Set(AttributeSet { + span, + kind: BindingsKind::Attrs, + inherits: vec![], + entries: vec![(span, path, value)], + }) + } else { + Binding::Plain { expr: value } + }; + + bindings.track_new(key_slot, value_slot, binding); + } + } + + /// Compile attribute set literals into equivalent bytecode. + /// + /// This is complicated by a number of features specific to Nix attribute + /// sets, most importantly: + /// + /// 1. Keys can be dynamically constructed through interpolation. + /// 2. Keys can refer to nested attribute sets. + /// 3. Attribute sets can (optionally) be recursive. + pub(super) fn compile_attr_set(&mut self, slot: LocalIdx, node: &ast::AttrSet) { + // Open a scope to track the positions of the temporaries used by the + // `OpAttrs` instruction. 
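// For example, `{ a = 1; b = 2; }` pushes the keys and values as four
// temporaries and then folds them into a single attribute set value with
// OpAttrs(Count(2)).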
+ self.scope_mut().begin_scope(); + + let kind = if node.rec_token().is_some() { + BindingsKind::RecAttrs + } else { + BindingsKind::Attrs + }; + + self.compile_bindings(slot, kind, node); + + // Remove the temporary scope, but do not emit any additional cleanup + // (OpAttrs consumes all of these locals). + self.scope_mut().end_scope(); + } + + /// Actually binds all tracked bindings by emitting the bytecode that places + /// them in their stack slots. + fn bind_values(&mut self, bindings: TrackedBindings) { + let mut value_indices: Vec<LocalIdx> = vec![]; + + for binding in bindings.bindings.into_iter() { + value_indices.push(binding.value_slot); + + match binding.key_slot { + KeySlot::None { .. } => {} // nothing to do here + + KeySlot::Static { slot, name } => { + let span = self.scope()[slot].span; + self.emit_constant(Value::String(name.as_str().into()), &span); + self.scope_mut().mark_initialised(slot); + } + + KeySlot::Dynamic { slot, attr } => { + self.compile_attr(slot, &attr); + self.scope_mut().mark_initialised(slot); + } + } + + match binding.binding { + // This entry is an inherit (from) expr. The value is placed on + // the stack by selecting an attribute. + Binding::InheritFrom { + namespace, + name, + span, + } => { + // Create a thunk wrapping value (which may be one as well) + // to avoid forcing the from expr too early. + self.thunk(binding.value_slot, &namespace, |c, s| { + c.compile(s, namespace.clone()); + c.emit_force(&namespace); + + c.emit_constant(Value::String(name.as_str().into()), &span); + c.push_op(OpCode::OpAttrsSelect, &span); + }) + } + + // Binding is "just" a plain expression that needs to be + // compiled. + Binding::Plain { expr } => self.compile(binding.value_slot, expr), + + // Binding is a merged or nested attribute set, and needs to be + // recursively compiled as another binding. + Binding::Set(set) => self.thunk(binding.value_slot, &set, |c, _| { + c.scope_mut().begin_scope(); + c.compile_bindings(binding.value_slot, set.kind, &set); + c.scope_mut().end_scope(); + }), + } + + // Any code after this point will observe the value in the right + // stack slot, so mark it as initialised. + self.scope_mut().mark_initialised(binding.value_slot); + } + + // Final pass to emit finaliser instructions if necessary. + for idx in value_indices { + if self.scope()[idx].needs_finaliser { + let stack_idx = self.scope().stack_index(idx); + let span = self.scope()[idx].span; + self.push_op(OpCode::OpFinalise(stack_idx), &span); + } + } + } + + fn compile_bindings<N>(&mut self, slot: LocalIdx, kind: BindingsKind, node: &N) + where + N: ToSpan + HasEntryProxy, + { + let mut count = 0; + self.scope_mut().begin_scope(); + + // Vector to track all observed bindings. + let mut bindings = TrackedBindings::new(); + + let inherit_froms = self.compile_plain_inherits(slot, kind, &mut count, node); + self.declare_namespaced_inherits(kind, inherit_froms, &mut bindings); + self.declare_bindings(kind, &mut count, &mut bindings, node); + + // Check if we can bail out on empty bindings + if count == 0 { + // still need an attrset to exist, but it is empty. + if kind.is_attrs() { + self.emit_constant(Value::Attrs(Box::new(NixAttrs::empty())), node); + return; + } + + self.emit_warning(node, WarningKind::EmptyLet); + return; + } + + // Actually bind values and ensure they are on the stack. 
+ self.bind_values(bindings); + + if kind.is_attrs() { + self.push_op(OpCode::OpAttrs(Count(count)), node); + } + + if count == 0 { + self.unthunk(); + } + } + + /// Compile a standard `let ...; in ...` expression. + /// + /// Unless in a non-standard scope, the encountered values are simply pushed + /// on the stack and their indices noted in the entries vector. + pub(super) fn compile_let_in(&mut self, slot: LocalIdx, node: &ast::LetIn) { + self.compile_bindings(slot, BindingsKind::LetIn, node); + + // Deal with the body, then clean up the locals afterwards. + self.compile(slot, node.body().unwrap()); + self.cleanup_scope(node); + } + + pub(super) fn compile_legacy_let(&mut self, slot: LocalIdx, node: &ast::LegacyLet) { + self.emit_warning(node, WarningKind::DeprecatedLegacyLet); + self.scope_mut().begin_scope(); + self.compile_bindings(slot, BindingsKind::RecAttrs, node); + + // Remove the temporary scope, but do not emit any additional cleanup + // (OpAttrs consumes all of these locals). + self.scope_mut().end_scope(); + + self.emit_constant(Value::String("body".into()), node); + self.push_op(OpCode::OpAttrsSelect, node); + } + + /// Is the given identifier defined *by the user* in any current scope? + pub(super) fn is_user_defined(&mut self, ident: &str) -> bool { + matches!( + self.scope_mut().resolve_local(ident), + LocalPosition::Known(_) | LocalPosition::Recursive(_) + ) + } + + /// Resolve and compile access to an identifier in the scope. + fn compile_identifier_access<N: ToSpan + Clone>( + &mut self, + slot: LocalIdx, + ident: &str, + node: &N, + ) { + match self.scope_mut().resolve_local(ident) { + LocalPosition::Unknown => { + // Are we possibly dealing with an upvalue? + if let Some(idx) = self.resolve_upvalue(self.contexts.len() - 1, ident, node) { + self.push_op(OpCode::OpGetUpvalue(idx), node); + return; + } + + // Globals are the "upmost upvalues": they behave + // exactly like a `let ... in` prepended to the + // program's text, and the global scope is nothing + // more than the parent scope of the root scope. + if let Some(global) = self.globals.get(ident) { + self.emit_constant(global.clone(), &self.span_for(node)); + return; + } + + // If there is a non-empty `with`-stack (or a parent context + // with one), emit a runtime dynamic resolution instruction. + // + // Since it is possible for users to e.g. assign a variable to a + // dynamic resolution without actually using it, this operation + // is wrapped in an extra thunk. + if self.has_dynamic_ancestor() { + self.thunk(slot, node, |c, _| { + c.context_mut().captures_with_stack = true; + c.emit_constant(Value::String(ident.into()), node); + c.push_op(OpCode::OpResolveWith, node); + }); + return; + } + + // Otherwise, this variable is missing. + self.emit_error(node, ErrorKind::UnknownStaticVariable); + } + + LocalPosition::Known(idx) => { + let stack_idx = self.scope().stack_index(idx); + self.push_op(OpCode::OpGetLocal(stack_idx), node); + } + + // This identifier is referring to a value from the same scope which + // is not yet defined. This identifier access must be thunked. 
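+            //
+            // E.g. in `let a = b; b = 1; in a`, the `b` in `a = b` is already
+            // declared but not yet initialised when it is compiled, so the
+            // access is wrapped in a thunk that captures `b` as an upvalue
+            // and is finalised later.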
+ LocalPosition::Recursive(idx) => self.thunk(slot, node, move |compiler, _| { + let upvalue_idx = compiler.add_upvalue( + compiler.contexts.len() - 1, + node, + UpvalueKind::Local(idx), + ); + compiler.push_op(OpCode::OpGetUpvalue(upvalue_idx), node); + }), + }; + } + + pub(super) fn compile_ident(&mut self, slot: LocalIdx, node: &ast::Ident) { + let ident = node.ident_token().unwrap(); + self.compile_identifier_access(slot, ident.text(), node); + } +} + +/// Private compiler helpers related to bindings. +impl Compiler<'_> { + fn resolve_upvalue<N: ToSpan>( + &mut self, + ctx_idx: usize, + name: &str, + node: &N, + ) -> Option<UpvalueIdx> { + if ctx_idx == 0 { + // There can not be any upvalue at the outermost context. + return None; + } + + // Determine whether the upvalue is a local in the enclosing context. + match self.contexts[ctx_idx - 1].scope.resolve_local(name) { + // recursive upvalues are dealt with the same way as standard known + // ones, as thunks and closures are guaranteed to be placed on the + // stack (i.e. in the right position) *during* their runtime + // construction + LocalPosition::Known(idx) | LocalPosition::Recursive(idx) => { + return Some(self.add_upvalue(ctx_idx, node, UpvalueKind::Local(idx))) + } + + LocalPosition::Unknown => { /* continue below */ } + }; + + // If the upvalue comes from even further up, we need to recurse to make + // sure that the upvalues are created at each level. + if let Some(idx) = self.resolve_upvalue(ctx_idx - 1, name, node) { + return Some(self.add_upvalue(ctx_idx, node, UpvalueKind::Upvalue(idx))); + } + + None + } + + fn add_upvalue<N: ToSpan>( + &mut self, + ctx_idx: usize, + node: &N, + kind: UpvalueKind, + ) -> UpvalueIdx { + // If there is already an upvalue closing over the specified index, + // retrieve that instead. + for (idx, existing) in self.contexts[ctx_idx].scope.upvalues.iter().enumerate() { + if existing.kind == kind { + return UpvalueIdx(idx); + } + } + + let span = self.span_for(node); + self.contexts[ctx_idx] + .scope + .upvalues + .push(Upvalue { kind, span }); + + let idx = UpvalueIdx(self.contexts[ctx_idx].lambda.upvalue_count); + self.contexts[ctx_idx].lambda.upvalue_count += 1; + idx + } +} diff --git a/tvix/eval/src/compiler/import.rs b/tvix/eval/src/compiler/import.rs new file mode 100644 index 000000000000..7e1aabbf1f8b --- /dev/null +++ b/tvix/eval/src/compiler/import.rs @@ -0,0 +1,116 @@ +//! This module implements the Nix language's `import` feature, which +//! is exposed as a builtin in the Nix language. +//! +//! This is not a typical builtin, as it needs access to internal +//! compiler and VM state (such as the [`crate::SourceCode`] +//! instance, or observers). + +use super::GlobalsMap; +use genawaiter::rc::Gen; +use std::rc::Weak; + +use crate::{ + builtins::coerce_value_to_path, + generators::pin_generator, + observer::NoOpObserver, + value::{Builtin, Thunk}, + vm::generators::{self, GenCo}, + ErrorKind, SourceCode, Value, +}; + +async fn import_impl( + co: GenCo, + globals: Weak<GlobalsMap>, + source: SourceCode, + mut args: Vec<Value>, +) -> Result<Value, ErrorKind> { + let mut path = coerce_value_to_path(&co, args.pop().unwrap()).await?; + + if path.is_dir() { + path.push("default.nix"); + } + + if let Some(cached) = generators::request_import_cache_lookup(&co, path.clone()).await { + return Ok(cached); + } + + // TODO(tazjin): make this return a string directly instead + let contents = generators::request_read_to_string(&co, path.clone()) + .await + .to_str()? 
+ .as_str() + .to_string(); + + let parsed = rnix::ast::Root::parse(&contents); + let errors = parsed.errors(); + let file = source.add_file(path.to_string_lossy().to_string(), contents); + + if !errors.is_empty() { + return Err(ErrorKind::ImportParseError { + path, + file, + errors: errors.to_vec(), + }); + } + + let result = crate::compiler::compile( + &parsed.tree().expr().unwrap(), + Some(path.clone()), + file, + // The VM must ensure that a strong reference to the globals outlives + // any self-references (which are weak) embedded within the globals. If + // the expect() below panics, it means that did not happen. + globals + .upgrade() + .expect("globals dropped while still in use"), + &mut NoOpObserver::default(), + ) + .map_err(|err| ErrorKind::ImportCompilerError { + path: path.clone(), + errors: vec![err], + })?; + + if !result.errors.is_empty() { + return Err(ErrorKind::ImportCompilerError { + path, + errors: result.errors, + }); + } + + for warning in result.warnings { + generators::emit_warning(&co, warning).await; + } + + // Compilation succeeded, we can construct a thunk from whatever it spat + // out and return that. + let res = Value::Thunk(Thunk::new_suspended( + result.lambda, + generators::request_span(&co).await, + )); + + generators::request_import_cache_put(&co, path, res.clone()).await; + + Ok(res) +} + +/// Constructs the `import` builtin. This builtin is special in that +/// it needs to capture the [crate::SourceCode] structure to correctly +/// track source code locations while invoking a compiler. +// TODO: need to be able to pass through a CompilationObserver, too. +// TODO: can the `SourceCode` come from the compiler? +pub(super) fn builtins_import(globals: &Weak<GlobalsMap>, source: SourceCode) -> Builtin { + // This (very cheap, once-per-compiler-startup) clone exists + // solely in order to keep the borrow checker happy. It + // resolves the tension between the requirements of + // Rc::new_cyclic() and Builtin::new() + let globals = globals.clone(); + + Builtin::new( + "import", + Some("Import the given file and return the Nix value it evaluates to"), + 1, + move |args| { + Gen::new(|co| pin_generator(import_impl(co, globals.clone(), source.clone(), args))) + }, + ) +} diff --git a/tvix/eval/src/compiler/mod.rs b/tvix/eval/src/compiler/mod.rs new file mode 100644 index 000000000000..478d132758e8 --- /dev/null +++ b/tvix/eval/src/compiler/mod.rs @@ -0,0 +1,1497 @@ +//! This module implements a compiler for compiling the rnix AST +//! representation to Tvix bytecode. +//! +//! A note on `unwrap()`: This module contains a lot of calls to +//! `unwrap()` or `expect(...)` on data structures returned by `rnix`. +//! The reason for this is that rnix uses the same data structures to +//! represent broken and correct ASTs, so all typed AST variants have +//! the ability to represent an incorrect node. +//! +//! However, at the time that the AST is passed to the compiler we +//! have verified that `rnix` considers the code to be correct, so all +//! variants are fulfilled. In cases where the invariant is guaranteed +//! by the code in this module, `debug_assert!` has been used to catch +//! mistakes early during development. 
+ +mod bindings; +mod import; +mod optimiser; +mod scope; + +use codemap::Span; +use rnix::ast::{self, AstToken}; +use smol_str::SmolStr; +use std::collections::HashMap; +use std::path::{Path, PathBuf}; +use std::rc::{Rc, Weak}; +use std::sync::Arc; + +use crate::chunk::Chunk; +use crate::errors::{Error, ErrorKind, EvalResult}; +use crate::observer::CompilerObserver; +use crate::opcode::{CodeIdx, ConstantIdx, Count, JumpOffset, OpCode, UpvalueIdx}; +use crate::spans::LightSpan; +use crate::spans::ToSpan; +use crate::value::{Closure, Formals, Lambda, NixAttrs, Thunk, Value}; +use crate::warnings::{EvalWarning, WarningKind}; +use crate::SourceCode; + +use self::scope::{LocalIdx, LocalPosition, Scope, Upvalue, UpvalueKind}; + +/// Represents the result of compiling a piece of Nix code. If +/// compilation was successful, the resulting bytecode can be passed +/// to the VM. +pub struct CompilationOutput { + pub lambda: Rc<Lambda>, + pub warnings: Vec<EvalWarning>, + pub errors: Vec<Error>, + + // This field must outlive the rc::Weak reference which breaks the + // builtins -> import -> builtins reference cycle. For this + // reason, it must be passed to the VM. + pub globals: Rc<GlobalsMap>, +} + +/// Represents the lambda currently being compiled. +struct LambdaCtx { + lambda: Lambda, + scope: Scope, + captures_with_stack: bool, + unthunk: bool, +} + +impl LambdaCtx { + fn new() -> Self { + LambdaCtx { + lambda: Lambda::default(), + scope: Default::default(), + captures_with_stack: false, + unthunk: false, + } + } + + fn inherit(&self) -> Self { + LambdaCtx { + lambda: Lambda::default(), + scope: self.scope.inherit(), + captures_with_stack: false, + unthunk: false, + } + } +} + +/// The map of globally available functions and other values that +/// should implicitly be resolvable in the global scope. +pub(crate) type GlobalsMap = HashMap<&'static str, Value>; + +/// Set of builtins that (if they exist) should be made available in +/// the global scope, meaning that they can be accessed not just +/// through `builtins.<name>`, but directly as `<name>`. This is not +/// configurable, it is based on what Nix 2.3 exposed. +const GLOBAL_BUILTINS: &[&str] = &[ + "abort", + "baseNameOf", + "derivation", + "derivationStrict", + "dirOf", + "fetchGit", + "fetchMercurial", + "fetchTarball", + "fromTOML", + "import", + "isNull", + "map", + "placeholder", + "removeAttrs", + "scopedImport", + "throw", + "toString", +]; + +pub struct Compiler<'observer> { + contexts: Vec<LambdaCtx>, + warnings: Vec<EvalWarning>, + errors: Vec<Error>, + root_dir: PathBuf, + + /// Carries all known global tokens; the full set of which is + /// created when the compiler is invoked. + /// + /// Each global has an associated token, which when encountered as + /// an identifier is resolved against the scope poisoning logic, + /// and a function that should emit code for the token. + globals: Rc<GlobalsMap>, + + /// File reference in the codemap contains all known source code + /// and is used to track the spans from which instructions where + /// derived. + file: Arc<codemap::File>, + + /// Carry an observer for the compilation process, which is called + /// whenever a chunk is emitted. + observer: &'observer mut dyn CompilerObserver, + + /// Carry a count of nested scopes which have requested the + /// compiler not to emit anything. This used for compiling dead + /// code branches to catch errors & warnings in them. 
+ dead_scope: usize, +} + +impl Compiler<'_> { + pub(super) fn span_for<S: ToSpan>(&self, to_span: &S) -> Span { + to_span.span_for(&self.file) + } +} + +/// Compiler construction +impl<'observer> Compiler<'observer> { + pub(crate) fn new( + location: Option<PathBuf>, + file: Arc<codemap::File>, + globals: Rc<GlobalsMap>, + observer: &'observer mut dyn CompilerObserver, + ) -> EvalResult<Self> { + let mut root_dir = match location { + Some(dir) if cfg!(target_arch = "wasm32") || dir.is_absolute() => Ok(dir), + _ => { + let current_dir = std::env::current_dir().map_err(|e| { + Error::new( + ErrorKind::RelativePathResolution(format!( + "could not determine current directory: {}", + e + )), + file.span, + ) + })?; + if let Some(dir) = location { + Ok(current_dir.join(dir)) + } else { + Ok(current_dir) + } + } + }?; + + // If the path passed from the caller points to a file, the + // filename itself needs to be truncated as this must point to a + // directory. + if root_dir.is_file() { + root_dir.pop(); + } + + #[cfg(not(target_arch = "wasm32"))] + debug_assert!(root_dir.is_absolute()); + + Ok(Self { + root_dir, + file, + observer, + globals, + contexts: vec![LambdaCtx::new()], + warnings: vec![], + errors: vec![], + dead_scope: 0, + }) + } +} + +// Helper functions for emitting code and metadata to the internal +// structures of the compiler. +impl Compiler<'_> { + fn context(&self) -> &LambdaCtx { + &self.contexts[self.contexts.len() - 1] + } + + fn context_mut(&mut self) -> &mut LambdaCtx { + let idx = self.contexts.len() - 1; + &mut self.contexts[idx] + } + + fn chunk(&mut self) -> &mut Chunk { + &mut self.context_mut().lambda.chunk + } + + fn scope(&self) -> &Scope { + &self.context().scope + } + + fn scope_mut(&mut self) -> &mut Scope { + &mut self.context_mut().scope + } + + /// Push a single instruction to the current bytecode chunk and + /// track the source span from which it was compiled. + fn push_op<T: ToSpan>(&mut self, data: OpCode, node: &T) -> CodeIdx { + if self.dead_scope > 0 { + return CodeIdx(0); + } + + let span = self.span_for(node); + self.chunk().push_op(data, span) + } + + /// Emit a single constant to the current bytecode chunk and track + /// the source span from which it was compiled. + pub(super) fn emit_constant<T: ToSpan>(&mut self, value: Value, node: &T) { + if self.dead_scope > 0 { + return; + } + + let idx = self.chunk().push_constant(value); + self.push_op(OpCode::OpConstant(idx), node); + } +} + +// Actual code-emitting AST traversal methods. 
+impl Compiler<'_> {
+    fn compile(&mut self, slot: LocalIdx, expr: ast::Expr) {
+        let expr = optimiser::optimise_expr(self, slot, expr);
+
+        match &expr {
+            ast::Expr::Literal(literal) => self.compile_literal(literal),
+            ast::Expr::Path(path) => self.compile_path(slot, path),
+            ast::Expr::Str(s) => self.compile_str(slot, s),
+
+            ast::Expr::UnaryOp(op) => self.thunk(slot, op, move |c, s| c.compile_unary_op(s, op)),
+
+            ast::Expr::BinOp(binop) => {
+                self.thunk(slot, binop, move |c, s| c.compile_binop(s, binop))
+            }
+
+            ast::Expr::HasAttr(has_attr) => {
+                self.thunk(slot, has_attr, move |c, s| c.compile_has_attr(s, has_attr))
+            }
+
+            ast::Expr::List(list) => self.thunk(slot, list, move |c, s| c.compile_list(s, list)),
+
+            ast::Expr::AttrSet(attrs) => {
+                self.thunk(slot, attrs, move |c, s| c.compile_attr_set(s, attrs))
+            }
+
+            ast::Expr::Select(select) => {
+                self.thunk(slot, select, move |c, s| c.compile_select(s, select))
+            }
+
+            ast::Expr::Assert(assert) => {
+                self.thunk(slot, assert, move |c, s| c.compile_assert(s, assert))
+            }
+            ast::Expr::IfElse(if_else) => {
+                self.thunk(slot, if_else, move |c, s| c.compile_if_else(s, if_else))
+            }
+
+            ast::Expr::LetIn(let_in) => {
+                self.thunk(slot, let_in, move |c, s| c.compile_let_in(s, let_in))
+            }
+
+            ast::Expr::Ident(ident) => self.compile_ident(slot, ident),
+            ast::Expr::With(with) => self.thunk(slot, with, |c, s| c.compile_with(s, with)),
+            ast::Expr::Lambda(lambda) => self.thunk(slot, lambda, move |c, s| {
+                c.compile_lambda_or_thunk(false, s, lambda, |c, s| c.compile_lambda(s, lambda))
+            }),
+            ast::Expr::Apply(apply) => {
+                self.thunk(slot, apply, move |c, s| c.compile_apply(s, apply))
+            }
+
+            // Parenthesized expressions are simply unwrapped, leaving
+            // their value on the stack.
+            ast::Expr::Paren(paren) => self.compile(slot, paren.expr().unwrap()),
+
+            ast::Expr::LegacyLet(legacy_let) => self.thunk(slot, legacy_let, move |c, s| {
+                c.compile_legacy_let(s, legacy_let)
+            }),
+
+            ast::Expr::Root(_) => unreachable!("there cannot be more than one root"),
+            ast::Expr::Error(_) => unreachable!("compile is only called on validated trees"),
+        }
+    }
+
+    /// Compiles an expression, but does not emit any code for it as
+    /// it is considered dead. This will still catch errors and
+    /// warnings in that expression.
+    ///
+    /// A warning about that code being dead is assumed to have already
+    /// been emitted by the caller of [compile_dead_code].
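+    ///
+    /// For instance, the expression optimiser compiles the discarded arm of
+    /// an always-true `||` through this path, so that errors and warnings
+    /// inside it are still reported even though no bytecode is emitted for it.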
+ fn compile_dead_code(&mut self, slot: LocalIdx, node: ast::Expr) { + self.dead_scope += 1; + self.compile(slot, node); + self.dead_scope -= 1; + } + + fn compile_literal(&mut self, node: &ast::Literal) { + let value = match node.kind() { + ast::LiteralKind::Float(f) => Value::Float(f.value().unwrap()), + ast::LiteralKind::Integer(i) => match i.value() { + Ok(v) => Value::Integer(v), + Err(err) => return self.emit_error(node, err.into()), + }, + + ast::LiteralKind::Uri(u) => { + self.emit_warning(node, WarningKind::DeprecatedLiteralURL); + Value::String(u.syntax().text().into()) + } + }; + + self.emit_constant(value, node); + } + + fn compile_path(&mut self, slot: LocalIdx, node: &ast::Path) { + // TODO(tazjin): placeholder implementation while waiting for + // https://github.com/nix-community/rnix-parser/pull/96 + + let raw_path = node.to_string(); + let path = if raw_path.starts_with('/') { + Path::new(&raw_path).to_owned() + } else if raw_path.starts_with('~') { + return self.thunk(slot, node, move |c, _| { + // We assume that home paths start with ~/ or fail to parse + // TODO: this should be checked using a parse-fail test. + debug_assert!(raw_path.len() > 2 && raw_path.starts_with("~/")); + + let home_relative_path = &raw_path[2..(raw_path.len())]; + c.emit_constant( + Value::UnresolvedPath(Box::new(home_relative_path.into())), + node, + ); + c.push_op(OpCode::OpResolveHomePath, node); + }); + } else if raw_path.starts_with('<') { + // TODO: decide what to do with findFile + if raw_path.len() == 2 { + return self.emit_error( + node, + ErrorKind::NixPathResolution("Empty <> path not allowed".into()), + ); + } + let path = &raw_path[1..(raw_path.len() - 1)]; + // Make a thunk to resolve the path (without using `findFile`, at least for now?) + return self.thunk(slot, node, move |c, _| { + c.emit_constant(Value::UnresolvedPath(Box::new(path.into())), node); + c.push_op(OpCode::OpFindFile, node); + }); + } else { + let mut buf = self.root_dir.clone(); + buf.push(&raw_path); + buf + }; + + // TODO: Use https://github.com/rust-lang/rfcs/issues/2208 + // once it is available + let value = Value::Path(Box::new(crate::value::canon_path(path))); + self.emit_constant(value, node); + } + + /// Helper that compiles the given string parts strictly. The caller + /// (`compile_str`) needs to figure out if the result of compiling this + /// needs to be thunked or not. + fn compile_str_parts( + &mut self, + slot: LocalIdx, + parent_node: &ast::Str, + parts: Vec<ast::InterpolPart<String>>, + ) { + // The string parts are produced in literal order, however + // they need to be reversed on the stack in order to + // efficiently create the real string in case of + // interpolation. + for part in parts.iter().rev() { + match part { + // Interpolated expressions are compiled as normal and + // dealt with by the VM before being assembled into + // the final string. We need to coerce them here, + // so OpInterpolate definitely has a string to consume. 
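+                //
+                // Roughly, `"a${x}b"` is expected to become (parts pushed in
+                // reverse): OpConstant("b"); <code for x>; OpCoerceToString;
+                // OpConstant("a"); OpInterpolate(3).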
+ ast::InterpolPart::Interpolation(ipol) => { + self.compile(slot, ipol.expr().unwrap()); + // implicitly forces as well + self.push_op(OpCode::OpCoerceToString, ipol); + } + + ast::InterpolPart::Literal(lit) => { + self.emit_constant(Value::String(lit.as_str().into()), parent_node); + } + } + } + + if parts.len() != 1 { + self.push_op(OpCode::OpInterpolate(Count(parts.len())), parent_node); + } + } + + fn compile_str(&mut self, slot: LocalIdx, node: &ast::Str) { + let parts = node.normalized_parts(); + + // We need to thunk string expressions if they are the result of + // interpolation. A string that only consists of a single part (`"${foo}"`) + // can't desugar to the enclosed expression (`foo`) because we need to + // coerce the result to a string value. This would require forcing the + // value of the inner expression, so we need to wrap it in another thunk. + if parts.len() != 1 || matches!(&parts[0], ast::InterpolPart::Interpolation(_)) { + self.thunk(slot, node, move |c, s| { + c.compile_str_parts(s, node, parts); + }); + } else { + self.compile_str_parts(slot, node, parts); + } + } + + fn compile_unary_op(&mut self, slot: LocalIdx, op: &ast::UnaryOp) { + self.compile(slot, op.expr().unwrap()); + self.emit_force(op); + + let opcode = match op.operator().unwrap() { + ast::UnaryOpKind::Invert => OpCode::OpInvert, + ast::UnaryOpKind::Negate => OpCode::OpNegate, + }; + + self.push_op(opcode, op); + } + + fn compile_binop(&mut self, slot: LocalIdx, op: &ast::BinOp) { + use ast::BinOpKind; + + // Short-circuiting and other strange operators, which are + // under the same node type as NODE_BIN_OP, but need to be + // handled separately (i.e. before compiling the expressions + // used for standard binary operators). + + match op.operator().unwrap() { + BinOpKind::And => return self.compile_and(slot, op), + BinOpKind::Or => return self.compile_or(slot, op), + BinOpKind::Implication => return self.compile_implication(slot, op), + _ => {} + }; + + // For all other operators, the two values need to be left on + // the stack in the correct order before pushing the + // instruction for the operation itself. + self.compile(slot, op.lhs().unwrap()); + self.emit_force(&op.lhs().unwrap()); + + self.compile(slot, op.rhs().unwrap()); + self.emit_force(&op.rhs().unwrap()); + + match op.operator().unwrap() { + BinOpKind::Add => self.push_op(OpCode::OpAdd, op), + BinOpKind::Sub => self.push_op(OpCode::OpSub, op), + BinOpKind::Mul => self.push_op(OpCode::OpMul, op), + BinOpKind::Div => self.push_op(OpCode::OpDiv, op), + BinOpKind::Update => self.push_op(OpCode::OpAttrsUpdate, op), + BinOpKind::Equal => self.push_op(OpCode::OpEqual, op), + BinOpKind::Less => self.push_op(OpCode::OpLess, op), + BinOpKind::LessOrEq => self.push_op(OpCode::OpLessOrEq, op), + BinOpKind::More => self.push_op(OpCode::OpMore, op), + BinOpKind::MoreOrEq => self.push_op(OpCode::OpMoreOrEq, op), + BinOpKind::Concat => self.push_op(OpCode::OpConcat, op), + + BinOpKind::NotEqual => { + self.push_op(OpCode::OpEqual, op); + self.push_op(OpCode::OpInvert, op) + } + + // Handled by separate branch above. + BinOpKind::And | BinOpKind::Implication | BinOpKind::Or => { + unreachable!() + } + }; + } + + fn compile_and(&mut self, slot: LocalIdx, node: &ast::BinOp) { + debug_assert!( + matches!(node.operator(), Some(ast::BinOpKind::And)), + "compile_and called with wrong operator kind: {:?}", + node.operator(), + ); + + // Leave left-hand side value on the stack. 
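+        //
+        // Rough sketch of the emitted code for `a && b`:
+        //
+        //   <a>; OpForce
+        //   OpJumpIfFalse(end)    -- if `a` is false, skip over `b`
+        //   OpPop
+        //   <b>; OpForce
+        //   end: OpAssertBool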
+ self.compile(slot, node.lhs().unwrap()); + self.emit_force(&node.lhs().unwrap()); + + // If this value is false, jump over the right-hand side - the + // whole expression is false. + let end_idx = self.push_op(OpCode::OpJumpIfFalse(JumpOffset(0)), node); + + // Otherwise, remove the previous value and leave the + // right-hand side on the stack. Its result is now the value + // of the whole expression. + self.push_op(OpCode::OpPop, node); + self.compile(slot, node.rhs().unwrap()); + self.emit_force(&node.rhs().unwrap()); + + self.patch_jump(end_idx); + self.push_op(OpCode::OpAssertBool, node); + } + + fn compile_or(&mut self, slot: LocalIdx, node: &ast::BinOp) { + debug_assert!( + matches!(node.operator(), Some(ast::BinOpKind::Or)), + "compile_or called with wrong operator kind: {:?}", + node.operator(), + ); + + // Leave left-hand side value on the stack + self.compile(slot, node.lhs().unwrap()); + self.emit_force(&node.lhs().unwrap()); + + // Opposite of above: If this value is **true**, we can + // short-circuit the right-hand side. + let end_idx = self.push_op(OpCode::OpJumpIfTrue(JumpOffset(0)), node); + self.push_op(OpCode::OpPop, node); + self.compile(slot, node.rhs().unwrap()); + self.emit_force(&node.rhs().unwrap()); + + self.patch_jump(end_idx); + self.push_op(OpCode::OpAssertBool, node); + } + + fn compile_implication(&mut self, slot: LocalIdx, node: &ast::BinOp) { + debug_assert!( + matches!(node.operator(), Some(ast::BinOpKind::Implication)), + "compile_implication called with wrong operator kind: {:?}", + node.operator(), + ); + + // Leave left-hand side value on the stack and invert it. + self.compile(slot, node.lhs().unwrap()); + self.emit_force(&node.lhs().unwrap()); + self.push_op(OpCode::OpInvert, node); + + // Exactly as `||` (because `a -> b` = `!a || b`). + let end_idx = self.push_op(OpCode::OpJumpIfTrue(JumpOffset(0)), node); + self.push_op(OpCode::OpPop, node); + self.compile(slot, node.rhs().unwrap()); + self.emit_force(&node.rhs().unwrap()); + + self.patch_jump(end_idx); + self.push_op(OpCode::OpAssertBool, node); + } + + /// Compile list literals into equivalent bytecode. List + /// construction is fairly simple, consisting of pushing code for + /// each literal element and an instruction with the element + /// count. + /// + /// The VM, after evaluating the code for each element, simply + /// constructs the list from the given number of elements. + fn compile_list(&mut self, slot: LocalIdx, node: &ast::List) { + let mut count = 0; + + // Open a temporary scope to correctly account for stack items + // that exist during the construction. + self.scope_mut().begin_scope(); + + for item in node.items() { + // Start tracing new stack slots from the second list + // element onwards. The first list element is located in + // the stack slot of the list itself. 
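+            //
+            // E.g. for `[ a b c ]`, `a` reuses the list's own slot while `b`
+            // and `c` get phantom slots, before OpList(3) collapses all three
+            // into the final list value (roughly).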
+            let item_slot = match count {
+                0 => slot,
+                _ => {
+                    let item_span = self.span_for(&item);
+                    self.scope_mut().declare_phantom(item_span, false)
+                }
+            };
+
+            count += 1;
+            self.compile(item_slot, item);
+            self.scope_mut().mark_initialised(item_slot);
+        }
+
+        if count == 0 {
+            self.unthunk();
+        }
+
+        self.push_op(OpCode::OpList(Count(count)), node);
+        self.scope_mut().end_scope();
+    }
+
+    fn compile_attr(&mut self, slot: LocalIdx, node: &ast::Attr) {
+        match node {
+            ast::Attr::Dynamic(dynamic) => {
+                self.compile(slot, dynamic.expr().unwrap());
+                self.emit_force(&dynamic.expr().unwrap());
+            }
+
+            ast::Attr::Str(s) => {
+                self.compile_str(slot, s);
+                self.emit_force(s);
+            }
+
+            ast::Attr::Ident(ident) => self.emit_literal_ident(ident),
+        }
+    }
+
+    fn compile_has_attr(&mut self, slot: LocalIdx, node: &ast::HasAttr) {
+        // Put the attribute set on the stack.
+        self.compile(slot, node.expr().unwrap());
+        self.emit_force(node);
+
+        // Push all path fragments with an operation for fetching the
+        // next nested element, for all fragments except the last one.
+        for (count, fragment) in node.attrpath().unwrap().attrs().enumerate() {
+            if count > 0 {
+                self.push_op(OpCode::OpAttrsTrySelect, &fragment);
+                self.emit_force(&fragment);
+            }
+
+            self.compile_attr(slot, &fragment);
+        }
+
+        // After the last fragment, emit the actual instruction that
+        // leaves a boolean on the stack.
+        self.push_op(OpCode::OpHasAttr, node);
+    }
+
+    /// When compiling select or select_or expressions, an optimisation is
+    /// possible if compiling the set emitted a constant attribute set: the
+    /// emitted constant can immediately be replaced with the selected value.
+    ///
+    /// We take care not to emit an error here, as that would interfere with
+    /// thunking behaviour (there can be perfectly valid Nix code that accesses
+    /// a statically known attribute set that is lacking a key, because that
+    /// thunk is never evaluated). If anything is missing, just inform the
+    /// caller that the optimisation did not take place and move on. We may want
+    /// to emit warnings here in the future.
+    fn optimise_select(&mut self, path: &ast::Attrpath) -> bool {
+        // If compiling the set emitted a constant attribute set, the
+        // associated constant can immediately be replaced with the
+        // actual value.
+        //
+        // We take care not to emit an error here, as that would
+        // interfere with thunking behaviour (there can be perfectly
+        // valid Nix code that accesses a statically known attribute
+        // set that is lacking a key, because that thunk is never
+        // evaluated). If anything is missing, just move on. We may
+        // want to emit warnings here in the future.
+        if let Some(OpCode::OpConstant(ConstantIdx(idx))) = self.chunk().code.last().cloned() {
+            let constant = &mut self.chunk().constants[idx];
+            if let Value::Attrs(attrs) = constant {
+                let mut path_iter = path.attrs();
+
+                // Only do this optimisation if there is a *single*
+                // element in the attribute path. It is extremely
+                // unlikely that we'd have a static nested set.
+                if let (Some(attr), None) = (path_iter.next(), path_iter.next()) {
+                    // Only do this optimisation for statically known attrs.
+                    if let Some(ident) = expr_static_attr_str(&attr) {
+                        if let Some(selected_value) = attrs.select(ident.as_str()) {
+                            *constant = selected_value.clone();
+
+                            // If this worked, we can unthunk the current thunk.
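+                            // (E.g. `builtins.length`: the global `builtins`
+                            // set is emitted as a constant, so the selected
+                            // builtin can be substituted directly and no
+                            // select happens at runtime.)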
+ self.unthunk(); + + return true; + } + } + } + } + } + + false + } + + fn compile_select(&mut self, slot: LocalIdx, node: &ast::Select) { + let set = node.expr().unwrap(); + let path = node.attrpath().unwrap(); + + if node.or_token().is_some() { + return self.compile_select_or(slot, set, path, node.default_expr().unwrap()); + } + + // Push the set onto the stack + self.compile(slot, set.clone()); + if self.optimise_select(&path) { + return; + } + + // Compile each key fragment and emit access instructions. + // + // TODO: multi-select instruction to avoid re-pushing attrs on + // nested selects. + for fragment in path.attrs() { + // Force the current set value. + self.emit_force(&set); + + self.compile_attr(slot, &fragment); + self.push_op(OpCode::OpAttrsSelect, &fragment); + } + } + + /// Compile an `or` expression into a chunk of conditional jumps. + /// + /// If at any point during attribute set traversal a key is + /// missing, the `OpAttrOrNotFound` instruction will leave a + /// special sentinel value on the stack. + /// + /// After each access, a conditional jump evaluates the top of the + /// stack and short-circuits to the default value if it sees the + /// sentinel. + /// + /// Code like `{ a.b = 1; }.a.c or 42` yields this bytecode and + /// runtime stack: + /// + /// ```notrust + /// Bytecode Runtime stack + /// ┌────────────────────────────┐ ┌─────────────────────────┐ + /// │ ... │ │ ... │ + /// │ 5 OP_ATTRS(1) │ → │ 5 [ { a.b = 1; } ] │ + /// │ 6 OP_CONSTANT("a") │ → │ 6 [ { a.b = 1; } "a" ] │ + /// │ 7 OP_ATTR_OR_NOT_FOUND │ → │ 7 [ { b = 1; } ] │ + /// │ 8 JUMP_IF_NOT_FOUND(13) │ → │ 8 [ { b = 1; } ] │ + /// │ 9 OP_CONSTANT("C") │ → │ 9 [ { b = 1; } "c" ] │ + /// │ 10 OP_ATTR_OR_NOT_FOUND │ → │ 10 [ NOT_FOUND ] │ + /// │ 11 JUMP_IF_NOT_FOUND(13) │ → │ 11 [ ] │ + /// │ 12 JUMP(14) │ │ .. jumped over │ + /// │ 13 CONSTANT(42) │ → │ 12 [ 42 ] │ + /// │ 14 ... │ │ .. .... │ + /// └────────────────────────────┘ └─────────────────────────┘ + /// ``` + fn compile_select_or( + &mut self, + slot: LocalIdx, + set: ast::Expr, + path: ast::Attrpath, + default: ast::Expr, + ) { + self.compile(slot, set); + if self.optimise_select(&path) { + return; + } + + let mut jumps = vec![]; + + for fragment in path.attrs() { + self.emit_force(&fragment); + self.compile_attr(slot, &fragment.clone()); + self.push_op(OpCode::OpAttrsTrySelect, &fragment); + jumps.push(self.push_op(OpCode::OpJumpIfNotFound(JumpOffset(0)), &fragment)); + } + + let final_jump = self.push_op(OpCode::OpJump(JumpOffset(0)), &path); + + for jump in jumps { + self.patch_jump(jump); + } + + // Compile the default value expression and patch the final + // jump to point *beyond* it. + self.compile(slot, default); + self.patch_jump(final_jump); + } + + /// Compile `assert` expressions using jumping instructions in the VM. + /// + /// ```notrust + /// ┌─────────────────────┐ + /// │ 0 [ conditional ] │ + /// │ 1 JUMP_IF_FALSE →┼─┐ + /// │ 2 [ main body ] │ │ Jump to else body if + /// ┌┼─3─← JUMP │ │ condition is false. + /// Jump over else body ││ 4 OP_ASSERT_FAIL ←┼─┘ + /// if condition is true.└┼─5─→ ... │ + /// └─────────────────────┘ + /// ``` + fn compile_assert(&mut self, slot: LocalIdx, node: &ast::Assert) { + // Compile the assertion condition to leave its value on the stack. 
+ self.compile(slot, node.condition().unwrap()); + self.emit_force(&node.condition().unwrap()); + let then_idx = self.push_op(OpCode::OpJumpIfFalse(JumpOffset(0)), node); + + self.push_op(OpCode::OpPop, node); + self.compile(slot, node.body().unwrap()); + + let else_idx = self.push_op(OpCode::OpJump(JumpOffset(0)), node); + + self.patch_jump(then_idx); + self.push_op(OpCode::OpPop, node); + self.push_op(OpCode::OpAssertFail, &node.condition().unwrap()); + + self.patch_jump(else_idx); + } + + /// Compile conditional expressions using jumping instructions in the VM. + /// + /// ```notrust + /// ┌────────────────────┐ + /// │ 0 [ conditional ] │ + /// │ 1 JUMP_IF_FALSE →┼─┐ + /// │ 2 [ main body ] │ │ Jump to else body if + /// ┌┼─3─← JUMP │ │ condition is false. + /// Jump over else body ││ 4 [ else body ]←┼─┘ + /// if condition is true.└┼─5─→ ... │ + /// └────────────────────┘ + /// ``` + fn compile_if_else(&mut self, slot: LocalIdx, node: &ast::IfElse) { + self.compile(slot, node.condition().unwrap()); + self.emit_force(&node.condition().unwrap()); + + let then_idx = self.push_op( + OpCode::OpJumpIfFalse(JumpOffset(0)), + &node.condition().unwrap(), + ); + + self.push_op(OpCode::OpPop, node); // discard condition value + self.compile(slot, node.body().unwrap()); + + let else_idx = self.push_op(OpCode::OpJump(JumpOffset(0)), node); + + self.patch_jump(then_idx); // patch jump *to* else_body + self.push_op(OpCode::OpPop, node); // discard condition value + self.compile(slot, node.else_body().unwrap()); + + self.patch_jump(else_idx); // patch jump *over* else body + } + + /// Compile `with` expressions by emitting instructions that + /// pop/remove the indices of attribute sets that are implicitly + /// in scope through `with` on the "with-stack". + fn compile_with(&mut self, slot: LocalIdx, node: &ast::With) { + self.scope_mut().begin_scope(); + // TODO: Detect if the namespace is just an identifier, and + // resolve that directly (thus avoiding duplication on the + // stack). + self.compile(slot, node.namespace().unwrap()); + + let span = self.span_for(&node.namespace().unwrap()); + + // The attribute set from which `with` inherits values + // occupies a slot on the stack, but this stack slot is not + // directly accessible. As it must be accounted for to + // calculate correct offsets, what we call a "phantom" local + // is declared here. + let local_idx = self.scope_mut().declare_phantom(span, true); + let with_idx = self.scope().stack_index(local_idx); + + self.scope_mut().push_with(); + + self.push_op(OpCode::OpPushWith(with_idx), &node.namespace().unwrap()); + + self.compile(slot, node.body().unwrap()); + + self.push_op(OpCode::OpPopWith, node); + self.scope_mut().pop_with(); + self.cleanup_scope(node); + } + + /// Compiles pattern function arguments, such as `{ a, b }: ...`. + /// + /// These patterns are treated as a special case of locals binding + /// where the attribute set itself is placed on the first stack + /// slot of the call frame (either as a phantom, or named in case + /// of an `@` binding), and the function call sets up the rest of + /// the stack as if the parameters were rewritten into a `let` + /// binding. + /// + /// For example: + /// + /// ```nix + /// ({ a, b ? 2, c ? a * b, ... 
}@args: <body>) { a = 10; } + /// ``` + /// + /// would be compiled similarly to a binding such as + /// + /// ```nix + /// let args = { a = 10; }; + /// in let a = args.a; + /// b = args.a or 2; + /// c = args.c or a * b; + /// in <body> + /// ``` + /// + /// The only tricky bit being that bindings have to fail if too + /// many arguments are provided. This is done by emitting a + /// special instruction that checks the set of keys from a + /// constant containing the expected keys. + fn compile_param_pattern(&mut self, pattern: &ast::Pattern) -> Formals { + let span = self.span_for(pattern); + let set_idx = match pattern.pat_bind() { + Some(name) => self.declare_local(&name, name.ident().unwrap().to_string()), + None => self.scope_mut().declare_phantom(span, true), + }; + + // At call time, the attribute set is already at the top of + // the stack. + self.scope_mut().mark_initialised(set_idx); + self.emit_force(pattern); + + let ellipsis = pattern.ellipsis_token().is_some(); + if !ellipsis { + self.push_op(OpCode::OpValidateClosedFormals, pattern); + } + + // Similar to `let ... in ...`, we now do multiple passes over + // the bindings to first declare them, then populate them, and + // then finalise any necessary recursion into the scope. + let mut entries: Vec<(LocalIdx, ast::PatEntry)> = vec![]; + let mut indices: Vec<LocalIdx> = vec![]; + let mut arguments = HashMap::default(); + + for entry in pattern.pat_entries() { + let ident = entry.ident().unwrap(); + let idx = self.declare_local(&ident, ident.to_string()); + let has_default = entry.default().is_some(); + entries.push((idx, entry)); + indices.push(idx); + arguments.insert(ident.into(), has_default); + } + + // For each of the bindings, push the set on the stack and + // attempt to select from it. + let stack_idx = self.scope().stack_index(set_idx); + for (idx, entry) in entries.into_iter() { + self.push_op(OpCode::OpGetLocal(stack_idx), pattern); + self.emit_literal_ident(&entry.ident().unwrap()); + + // Use the same mechanism as `compile_select_or` if a + // default value was provided, or simply select otherwise. + if let Some(default_expr) = entry.default() { + self.push_op(OpCode::OpAttrsTrySelect, &entry.ident().unwrap()); + + let jump_to_default = + self.push_op(OpCode::OpJumpIfNotFound(JumpOffset(0)), &default_expr); + + let jump_over_default = self.push_op(OpCode::OpJump(JumpOffset(0)), &default_expr); + + self.patch_jump(jump_to_default); + + // Thunk the default expression, but only if it is something + // other than an identifier. 
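+                //
+                // E.g. in `{ a ? 1, b ? a }: ...` the default `a` is just an
+                // identifier and is compiled directly, while something like
+                // `{ a, b ? a * 2 }: ...` wraps `a * 2` in a thunk so that it
+                // is not evaluated eagerly.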
+ if let ast::Expr::Ident(_) = &default_expr { + self.compile(idx, default_expr); + } else { + self.thunk(idx, &self.span_for(&default_expr), move |c, s| { + c.compile(s, default_expr) + }); + } + + self.patch_jump(jump_over_default); + } else { + self.push_op(OpCode::OpAttrsSelect, &entry.ident().unwrap()); + } + + self.scope_mut().mark_initialised(idx); + } + + for idx in indices { + if self.scope()[idx].needs_finaliser { + let stack_idx = self.scope().stack_index(idx); + self.push_op(OpCode::OpFinalise(stack_idx), pattern); + } + } + + Formals { + arguments, + ellipsis, + span, + } + } + + fn compile_lambda(&mut self, slot: LocalIdx, node: &ast::Lambda) { + // Compile the function itself, recording its formal arguments (if any) + // for later use + let formals = match node.param().unwrap() { + ast::Param::Pattern(pat) => Some(self.compile_param_pattern(&pat)), + + ast::Param::IdentParam(param) => { + let name = param + .ident() + .unwrap() + .ident_token() + .unwrap() + .text() + .to_string(); + + let idx = self.declare_local(¶m, &name); + self.scope_mut().mark_initialised(idx); + None + } + }; + + self.compile(slot, node.body().unwrap()); + self.context_mut().lambda.formals = formals; + } + + fn thunk<N, F>(&mut self, outer_slot: LocalIdx, node: &N, content: F) + where + N: ToSpan, + F: FnOnce(&mut Compiler, LocalIdx), + { + self.compile_lambda_or_thunk(true, outer_slot, node, content) + } + + /// Mark the current thunk as redundant, i.e. possible to merge directly + /// into its parent lambda context without affecting runtime behaviour. + fn unthunk(&mut self) { + self.context_mut().unthunk = true; + } + + /// Compile an expression into a runtime closure or thunk + fn compile_lambda_or_thunk<N, F>( + &mut self, + is_suspended_thunk: bool, + outer_slot: LocalIdx, + node: &N, + content: F, + ) where + N: ToSpan, + F: FnOnce(&mut Compiler, LocalIdx), + { + let name = self.scope()[outer_slot].name(); + self.new_context(); + + // Set the (optional) name of the current slot on the lambda that is + // being compiled. + self.context_mut().lambda.name = name; + + let span = self.span_for(node); + let slot = self.scope_mut().declare_phantom(span, false); + self.scope_mut().begin_scope(); + + content(self, slot); + self.cleanup_scope(node); + + // TODO: determine and insert enclosing name, if available. + + // Pop the lambda context back off, and emit the finished + // lambda as a constant. + let mut compiled = self.contexts.pop().unwrap(); + + // The compiler might have decided to unthunk, i.e. raise the compiled + // code to the parent context. In that case we do so and return right + // away. + if compiled.unthunk && is_suspended_thunk { + self.chunk().extend(compiled.lambda.chunk); + return; + } + + // Emit an instruction to inform the VM that the chunk has ended. + compiled + .lambda + .chunk + .push_op(OpCode::OpReturn, self.span_for(node)); + + // Capturing the with stack counts as an upvalue, as it is + // emitted as an upvalue data instruction. + if compiled.captures_with_stack { + compiled.lambda.upvalue_count += 1; + } + + let lambda = Rc::new(compiled.lambda); + if is_suspended_thunk { + self.observer.observe_compiled_thunk(&lambda); + } else { + self.observer.observe_compiled_lambda(&lambda); + } + + // If no upvalues are captured, emit directly and move on. 
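+        //
+        // E.g. a closed lambda like `x: x + 1` captures nothing, so it can be
+        // emitted as a plain constant here instead of going through the
+        // OpClosure/OpThunk* construction below (roughly speaking).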
+ if lambda.upvalue_count == 0 { + self.emit_constant( + if is_suspended_thunk { + Value::Thunk(Thunk::new_suspended(lambda, LightSpan::new_actual(span))) + } else { + Value::Closure(Rc::new(Closure::new(lambda))) + }, + node, + ); + return; + } + + // Otherwise, we need to emit the variable number of + // operands that allow the runtime to close over the + // upvalues and leave a blueprint in the constant index from + // which the result can be constructed. + let blueprint_idx = self.chunk().push_constant(Value::Blueprint(lambda)); + + let code_idx = self.push_op( + if is_suspended_thunk { + OpCode::OpThunkSuspended(blueprint_idx) + } else { + OpCode::OpThunkClosure(blueprint_idx) + }, + node, + ); + + self.emit_upvalue_data( + outer_slot, + node, + compiled.scope.upvalues, + compiled.captures_with_stack, + ); + + if !is_suspended_thunk && !self.scope()[outer_slot].needs_finaliser { + if !self.scope()[outer_slot].must_thunk { + // The closure has upvalues, but is not recursive. Therefore no thunk is required, + // which saves us the overhead of Rc<RefCell<>> + self.chunk()[code_idx] = OpCode::OpClosure(blueprint_idx); + } else { + // This case occurs when a closure has upvalue-references to itself but does not need a + // finaliser. Since no OpFinalise will be emitted later on we synthesize one here. + // It is needed here only to set [`Closure::is_finalised`] which is used for sanity checks. + #[cfg(debug_assertions)] + self.push_op( + OpCode::OpFinalise(self.scope().stack_index(outer_slot)), + &self.span_for(node), + ); + } + } + } + + fn compile_apply(&mut self, slot: LocalIdx, node: &ast::Apply) { + // To call a function, we leave its arguments on the stack, + // followed by the function expression itself, and then emit a + // call instruction. This way, the stack is perfectly laid out + // to enter the function call straight away. + self.compile(slot, node.argument().unwrap()); + self.compile(slot, node.lambda().unwrap()); + self.emit_force(&node.lambda().unwrap()); + self.push_op(OpCode::OpCall, node); + } + + /// Emit the data instructions that the runtime needs to correctly + /// assemble the upvalues struct. + fn emit_upvalue_data<T: ToSpan>( + &mut self, + slot: LocalIdx, + node: &T, + upvalues: Vec<Upvalue>, + capture_with: bool, + ) { + for upvalue in upvalues { + match upvalue.kind { + UpvalueKind::Local(idx) => { + let target = &self.scope()[idx]; + let stack_idx = self.scope().stack_index(idx); + + // If the target is not yet initialised, we need to defer + // the local access + if !target.initialised { + self.push_op(OpCode::DataDeferredLocal(stack_idx), &upvalue.span); + self.scope_mut().mark_needs_finaliser(slot); + } else { + // a self-reference + if slot == idx { + self.scope_mut().mark_must_thunk(slot); + } + self.push_op(OpCode::DataStackIdx(stack_idx), &upvalue.span); + } + } + + UpvalueKind::Upvalue(idx) => { + self.push_op(OpCode::DataUpvalueIdx(idx), &upvalue.span); + } + }; + } + + if capture_with { + // TODO(tazjin): probably better to emit span for the ident that caused this + self.push_op(OpCode::DataCaptureWith, node); + } + } + + /// Emit the literal string value of an identifier. Required for + /// several operations related to attribute sets, where + /// identifiers are used as string keys. + fn emit_literal_ident(&mut self, ident: &ast::Ident) { + self.emit_constant(Value::String(ident.clone().into()), ident); + } + + /// Patch the jump instruction at the given index, setting its + /// jump offset from the placeholder to the current code position. 
+ /// + /// This is required because the actual target offset of jumps is + /// not known at the time when the jump operation itself is + /// emitted. + fn patch_jump(&mut self, idx: CodeIdx) { + let offset = JumpOffset(self.chunk().code.len() - 1 - idx.0); + + match &mut self.chunk().code[idx.0] { + OpCode::OpJump(n) + | OpCode::OpJumpIfFalse(n) + | OpCode::OpJumpIfTrue(n) + | OpCode::OpJumpIfNotFound(n) => { + *n = offset; + } + + op => panic!("attempted to patch unsupported op: {:?}", op), + } + } + + /// Decrease scope depth of the current function and emit + /// instructions to clean up the stack at runtime. + fn cleanup_scope<N: ToSpan>(&mut self, node: &N) { + // When ending a scope, all corresponding locals need to be + // removed, but the value of the body needs to remain on the + // stack. This is implemented by a separate instruction. + let (popcount, unused_spans) = self.scope_mut().end_scope(); + + for span in &unused_spans { + self.emit_warning(span, WarningKind::UnusedBinding); + } + + if popcount > 0 { + self.push_op(OpCode::OpCloseScope(Count(popcount)), node); + } + } + + /// Open a new lambda context within which to compile a function, + /// closure or thunk. + fn new_context(&mut self) { + self.contexts.push(self.context().inherit()); + } + + /// Declare a local variable known in the scope that is being + /// compiled by pushing it to the locals. This is used to + /// determine the stack offset of variables. + fn declare_local<S: Into<String>, N: ToSpan>(&mut self, node: &N, name: S) -> LocalIdx { + let name = name.into(); + let depth = self.scope().scope_depth(); + + // Do this little dance to turn name:&'a str into the same + // string with &'static lifetime, as required by WarningKind + if let Some((global_ident, _)) = self.globals.get_key_value(name.as_str()) { + self.emit_warning(node, WarningKind::ShadowedGlobal(global_ident)); + } + + let span = self.span_for(node); + let (idx, shadowed) = self.scope_mut().declare_local(name, span); + + if let Some(shadow_idx) = shadowed { + let other = &self.scope()[shadow_idx]; + if other.depth == depth { + self.emit_error(node, ErrorKind::VariableAlreadyDefined(other.span)); + } + } + + idx + } + + /// Determine whether the current lambda context has any ancestors + /// that use dynamic scope resolution, and mark contexts as + /// needing to capture their enclosing `with`-stack in their + /// upvalues. + fn has_dynamic_ancestor(&mut self) -> bool { + let mut ancestor_has_with = false; + + for ctx in self.contexts.iter_mut() { + if ancestor_has_with { + // If the ancestor has an active with stack, mark this + // lambda context as needing to capture it. + ctx.captures_with_stack = true; + } else { + // otherwise, check this context and move on + ancestor_has_with = ctx.scope.has_with(); + } + } + + ancestor_has_with + } + + fn emit_force<N: ToSpan>(&mut self, node: &N) { + self.push_op(OpCode::OpForce, node); + } + + fn emit_warning<N: ToSpan>(&mut self, node: &N, kind: WarningKind) { + let span = self.span_for(node); + self.warnings.push(EvalWarning { kind, span }) + } + + fn emit_error<N: ToSpan>(&mut self, node: &N, kind: ErrorKind) { + let span = self.span_for(node); + self.errors.push(Error::new(kind, span)) + } +} + +/// Convert a non-dynamic string expression to a string if possible. 
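+///
+/// E.g. `"foo"` yields `Some("foo")`, while interpolated strings such as
+/// `"foo${bar}"` or `"${bar}"` yield `None` (roughly).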
+fn expr_static_str(node: &ast::Str) -> Option<SmolStr> { + let mut parts = node.normalized_parts(); + + if parts.len() != 1 { + return None; + } + + if let Some(ast::InterpolPart::Literal(lit)) = parts.pop() { + return Some(SmolStr::new(lit)); + } + + None +} + +/// Convert the provided `ast::Attr` into a statically known string if +/// possible. +fn expr_static_attr_str(node: &ast::Attr) -> Option<SmolStr> { + match node { + ast::Attr::Ident(ident) => Some(ident.ident_token().unwrap().text().into()), + ast::Attr::Str(s) => expr_static_str(s), + + // The dynamic node type is just a wrapper. C++ Nix does not care + // about the dynamic wrapper when determining whether the node + // itself is dynamic, it depends solely on the expression inside + // (i.e. `let ${"a"} = 1; in a` is valid). + ast::Attr::Dynamic(ref dynamic) => match dynamic.expr().unwrap() { + ast::Expr::Str(s) => expr_static_str(&s), + _ => None, + }, + } +} + +/// Create a delayed source-only builtin compilation, for a builtin +/// which is written in Nix code. +/// +/// **Important:** tvix *panics* if a builtin with invalid source code +/// is supplied. This is because there is no user-friendly way to +/// thread the errors out of this function right now. +fn compile_src_builtin( + name: &'static str, + code: &str, + source: &SourceCode, + weak: &Weak<GlobalsMap>, +) -> Value { + use std::fmt::Write; + + let parsed = rnix::ast::Root::parse(code); + + if !parsed.errors().is_empty() { + let mut out = format!("BUG: code for source-builtin '{}' had parser errors", name); + for error in parsed.errors() { + writeln!(out, "{}", error).unwrap(); + } + + panic!("{}", out); + } + + let file = source.add_file(format!("<src-builtins/{}.nix>", name), code.to_string()); + let weak = weak.clone(); + + Value::Thunk(Thunk::new_suspended_native(Box::new(move || { + let result = compile( + &parsed.tree().expr().unwrap(), + None, + file.clone(), + weak.upgrade().unwrap(), + &mut crate::observer::NoOpObserver {}, + ) + .map_err(|e| ErrorKind::NativeError { + gen_type: "derivation", + err: Box::new(e), + })?; + + if !result.errors.is_empty() { + return Err(ErrorKind::ImportCompilerError { + path: format!("src-builtins/{}.nix", name).into(), + errors: result.errors, + }); + } + + Ok(Value::Thunk(Thunk::new_suspended( + result.lambda, + LightSpan::Actual { span: file.span }, + ))) + }))) +} + +/// Prepare the full set of globals available in evaluated code. These +/// are constructed from the set of builtins supplied by the caller, +/// which are made available globally under the `builtins` identifier. +/// +/// A subset of builtins (specified by [`GLOBAL_BUILTINS`]) is +/// available globally *iff* they are set. +/// +/// Optionally adds the `import` feature if desired by the caller. +pub fn prepare_globals( + builtins: Vec<(&'static str, Value)>, + src_builtins: Vec<(&'static str, &'static str)>, + source: SourceCode, + enable_import: bool, +) -> Rc<GlobalsMap> { + Rc::new_cyclic(Box::new(move |weak: &Weak<GlobalsMap>| { + // First step is to construct the builtins themselves as + // `NixAttrs`. + let mut builtins: GlobalsMap = HashMap::from_iter(builtins.into_iter()); + + // At this point, optionally insert `import` if enabled. To + // "tie the knot" of `import` needing the full set of globals + // to instantiate its compiler, the `Weak` reference is passed + // here. 
+ if enable_import { + let import = Value::Builtin(import::builtins_import(weak, source.clone())); + builtins.insert("import", import); + } + + // Next, the actual map of globals which the compiler will use + // to resolve identifiers is constructed. + let mut globals: GlobalsMap = HashMap::new(); + + // builtins contain themselves (`builtins.builtins`), which we + // can resolve by manually constructing a suspended thunk that + // dereferences the same weak pointer as above. + let weak_globals = weak.clone(); + builtins.insert( + "builtins", + Value::Thunk(Thunk::new_suspended_native(Box::new(move || { + Ok(weak_globals + .upgrade() + .unwrap() + .get("builtins") + .cloned() + .unwrap()) + }))), + ); + + // Insert top-level static value builtins. + globals.insert("true", Value::Bool(true)); + globals.insert("false", Value::Bool(false)); + globals.insert("null", Value::Null); + + // If "source builtins" were supplied, compile them and insert + // them. + builtins.extend(src_builtins.into_iter().map(move |(name, code)| { + let compiled = compile_src_builtin(name, code, &source, weak); + (name, compiled) + })); + + // Construct the actual `builtins` attribute set and insert it + // in the global scope. + globals.insert( + "builtins", + Value::attrs(NixAttrs::from_iter(builtins.clone().into_iter())), + ); + + // Finally, the builtins that should be globally available are + // "elevated" to the outer scope. + for global in GLOBAL_BUILTINS { + if let Some(builtin) = builtins.get(global).cloned() { + globals.insert(global, builtin); + } + } + + globals + })) +} + +pub fn compile( + expr: &ast::Expr, + location: Option<PathBuf>, + file: Arc<codemap::File>, + globals: Rc<GlobalsMap>, + observer: &mut dyn CompilerObserver, +) -> EvalResult<CompilationOutput> { + let mut c = Compiler::new(location, file, globals.clone(), observer)?; + + let root_span = c.span_for(expr); + let root_slot = c.scope_mut().declare_phantom(root_span, false); + c.compile(root_slot, expr.clone()); + + // The final operation of any top-level Nix program must always be + // `OpForce`. A thunk should not be returned to the user in an + // unevaluated state (though in practice, a value *containing* a + // thunk might be returned). + c.emit_force(expr); + c.push_op(OpCode::OpReturn, &root_span); + + let lambda = Rc::new(c.contexts.pop().unwrap().lambda); + c.observer.observe_compiled_toplevel(&lambda); + + Ok(CompilationOutput { + lambda, + warnings: c.warnings, + errors: c.errors, + globals, + }) +} diff --git a/tvix/eval/src/compiler/optimiser.rs b/tvix/eval/src/compiler/optimiser.rs new file mode 100644 index 000000000000..48960d355cc6 --- /dev/null +++ b/tvix/eval/src/compiler/optimiser.rs @@ -0,0 +1,125 @@ +//! Helper functions for extending the compiler with more linter-like +//! functionality while compiling (i.e. smarter warnings). + +use super::*; + +use ast::Expr; + +/// Optimise the given expression where possible. +pub(super) fn optimise_expr(c: &mut Compiler, slot: LocalIdx, expr: ast::Expr) -> ast::Expr { + match expr { + Expr::BinOp(_) => optimise_bin_op(c, slot, expr), + _ => expr, + } +} + +enum LitBool { + Expr(Expr), + True(Expr), + False(Expr), +} + +/// Is this a literal boolean, or something else? 
+fn is_lit_bool(expr: ast::Expr) -> LitBool { + if let ast::Expr::Ident(ident) = &expr { + match ident.ident_token().unwrap().text() { + "true" => LitBool::True(expr), + "false" => LitBool::False(expr), + _ => LitBool::Expr(expr), + } + } else { + LitBool::Expr(expr) + } +} + +/// Detect useless binary operations (i.e. useless bool comparisons). +fn optimise_bin_op(c: &mut Compiler, slot: LocalIdx, expr: ast::Expr) -> ast::Expr { + use ast::BinOpKind; + + // bail out of this check if the user has overridden either `true` + // or `false` identifiers. Note that they will have received a + // separate warning about this for shadowing the global(s). + if c.is_user_defined("true") || c.is_user_defined("false") { + return expr; + } + + if let Expr::BinOp(op) = &expr { + let lhs = is_lit_bool(op.lhs().unwrap()); + let rhs = is_lit_bool(op.rhs().unwrap()); + + match (op.operator().unwrap(), lhs, rhs) { + // useless `false` arm in `||` expression + (BinOpKind::Or, LitBool::False(f), LitBool::Expr(other)) + | (BinOpKind::Or, LitBool::Expr(other), LitBool::False(f)) => { + c.emit_warning( + &f, + WarningKind::UselessBoolOperation( + "this `false` has no effect on the result of the comparison", + ), + ); + + return other; + } + + // useless `true` arm in `&&` expression + (BinOpKind::And, LitBool::True(t), LitBool::Expr(other)) + | (BinOpKind::And, LitBool::Expr(other), LitBool::True(t)) => { + c.emit_warning( + &t, + WarningKind::UselessBoolOperation( + "this `true` has no effect on the result of the comparison", + ), + ); + + return other; + } + + // useless `||` expression (one arm is `true`), return + // `true` directly (and warn about dead code on the right) + (BinOpKind::Or, LitBool::True(t), LitBool::Expr(other)) => { + c.emit_warning( + op, + WarningKind::UselessBoolOperation("this expression is always true"), + ); + + c.compile_dead_code(slot, other); + + return t; + } + + (BinOpKind::Or, _, LitBool::True(t)) | (BinOpKind::Or, LitBool::True(t), _) => { + c.emit_warning( + op, + WarningKind::UselessBoolOperation("this expression is always true"), + ); + + return t; + } + + // useless `&&` expression (one arm is `false), same as above + (BinOpKind::And, LitBool::False(f), LitBool::Expr(other)) => { + c.emit_warning( + op, + WarningKind::UselessBoolOperation("this expression is always false"), + ); + + c.compile_dead_code(slot, other); + + return f; + } + + (BinOpKind::And, _, LitBool::False(f)) | (BinOpKind::Or, LitBool::False(f), _) => { + c.emit_warning( + op, + WarningKind::UselessBoolOperation("this expression is always false"), + ); + + return f; + } + + _ => { /* nothing to optimise */ } + } + } + + expr +} diff --git a/tvix/eval/src/compiler/scope.rs b/tvix/eval/src/compiler/scope.rs new file mode 100644 index 000000000000..892727c107c9 --- /dev/null +++ b/tvix/eval/src/compiler/scope.rs @@ -0,0 +1,378 @@ +//! This module implements the scope-tracking logic of the Tvix +//! compiler. +//! +//! Scoping in Nix is fairly complicated, there are features like +//! mutually recursive bindings, `with`, upvalue capturing, and so +//! on that introduce a fair bit of complexity. +//! +//! Tvix attempts to do as much of the heavy lifting of this at +//! compile time, and leave the runtime to mostly deal with known +//! stack indices. To do this, the compiler simulates where locals +//! will be at runtime using the data structures implemented here. 
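+//!
+//! For example (roughly), in `let a = 1; b = 2; in a + b` both locals are
+//! resolved to fixed stack slots at compile time, while identifiers that can
+//! only come from a `with` scope fall back to runtime resolution.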
+ +use std::{ + collections::{hash_map, HashMap}, + ops::Index, +}; + +use smol_str::SmolStr; + +use crate::opcode::{StackIdx, UpvalueIdx}; + +#[derive(Debug)] +enum LocalName { + /// Normally declared local with a statically known name. + Ident(String), + + /// Phantom stack value (e.g. attribute set used for `with`) that + /// must be accounted for to calculate correct stack offsets. + Phantom, +} + +/// Represents a single local already known to the compiler. +#[derive(Debug)] +pub struct Local { + /// Identifier of this local. This is always a statically known + /// value (Nix does not allow dynamic identifier names in locals), + /// or a "phantom" value not accessible by users. + name: LocalName, + + /// Source span at which this local was declared. + pub span: codemap::Span, + + /// Scope depth of this local. + pub depth: usize, + + /// Is this local initialised? + pub initialised: bool, + + /// Is this local known to have been used at all? + pub used: bool, + + /// Does this local need to be finalised after the enclosing scope + /// is completely constructed? + pub needs_finaliser: bool, + + /// Does this local's upvalues contain a reference to itself? + pub must_thunk: bool, +} + +impl Local { + /// Retrieve the name of the given local (if available). + pub fn name(&self) -> Option<SmolStr> { + match &self.name { + LocalName::Phantom => None, + LocalName::Ident(name) => Some(SmolStr::new(name)), + } + } + + /// Is this local intentionally ignored? (i.e. name starts with `_`) + pub fn is_ignored(&self) -> bool { + match &self.name { + LocalName::Ident(name) => name.starts_with('_'), + LocalName::Phantom => false, + } + } +} + +/// Represents the current position of an identifier as resolved in a scope. +pub enum LocalPosition { + /// Local is not known in this scope. + Unknown, + + /// Local is known at the given local index. + Known(LocalIdx), + + /// Local is known, but is being accessed recursively within its + /// own initialisation. Depending on context, this is either an + /// error or forcing a closure/thunk. + Recursive(LocalIdx), +} + +/// Represents the different ways in which upvalues can be captured in +/// closures or thunks. +#[derive(Clone, Debug, PartialEq, Eq)] +pub enum UpvalueKind { + /// This upvalue captures a local from the stack. + Local(LocalIdx), + + /// This upvalue captures an enclosing upvalue. + Upvalue(UpvalueIdx), +} + +#[derive(Clone, Debug)] +pub struct Upvalue { + pub kind: UpvalueKind, + pub span: codemap::Span, +} + +/// The index of a local in the scope's local array at compile time. +#[repr(transparent)] +#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd)] +pub struct LocalIdx(usize); + +/// Helper struct for indexing over `Scope::locals` by name. +#[derive(Debug)] +enum ByName { + Single(LocalIdx), + Shadowed(Vec<LocalIdx>), +} + +impl ByName { + /// Add an additional index for this name. + fn add_idx(&mut self, new: LocalIdx) { + match self { + ByName::Shadowed(indices) => indices.push(new), + ByName::Single(idx) => { + *self = ByName::Shadowed(vec![*idx, new]); + } + } + } + + /// Remove the most recent index for this name, unless it is a + /// single. Returns `true` if an entry was removed. + fn remove_idx(&mut self) -> bool { + match self { + ByName::Single(_) => false, + ByName::Shadowed(indices) => match indices[..] { + [fst, _snd] => { + *self = ByName::Single(fst); + true + } + _ => { + indices.pop(); + true + } + }, + } + } + + /// Return the most recent index. 
+ pub fn index(&self) -> LocalIdx { + match self { + ByName::Single(idx) => *idx, + ByName::Shadowed(vec) => *vec.last().unwrap(), + } + } +} + +/// Represents a scope known during compilation, which can be resolved +/// directly to stack indices. +#[derive(Debug, Default)] +pub struct Scope { + locals: Vec<Local>, + pub upvalues: Vec<Upvalue>, + + /// Secondary by-name index over locals. + by_name: HashMap<String, ByName>, + + /// How many scopes "deep" are these locals? + scope_depth: usize, + + /// Current size of the `with`-stack at runtime. + with_stack_size: usize, +} + +impl Index<LocalIdx> for Scope { + type Output = Local; + + fn index(&self, index: LocalIdx) -> &Self::Output { + &self.locals[index.0] + } +} + +impl Scope { + /// Inherit scope details from a parent scope (required for + /// correctly nesting scopes in lambdas and thunks when special + /// scope features like dynamic resolution are present). + pub fn inherit(&self) -> Self { + Self { + scope_depth: self.scope_depth + 1, + with_stack_size: self.with_stack_size, + ..Default::default() + } + } + + /// Increase the `with`-stack size of this scope. + pub fn push_with(&mut self) { + self.with_stack_size += 1; + } + + /// Decrease the `with`-stack size of this scope. + pub fn pop_with(&mut self) { + self.with_stack_size -= 1; + } + + /// Does this scope currently require dynamic runtime resolution + /// of identifiers that could not be found? + pub fn has_with(&self) -> bool { + self.with_stack_size > 0 + } + + /// Resolve the stack index of a statically known local. + pub fn resolve_local(&mut self, name: &str) -> LocalPosition { + if let Some(by_name) = self.by_name.get(name) { + let idx = by_name.index(); + let local = self + .locals + .get_mut(idx.0) + .expect("invalid compiler state: indexed local missing"); + + local.used = true; + + // This local is still being initialised, meaning that + // we know its final runtime stack position, but it is + // not yet on the stack. + if !local.initialised { + return LocalPosition::Recursive(idx); + } + + return LocalPosition::Known(idx); + } + + LocalPosition::Unknown + } + + /// Declare a local variable that occupies a stack slot and should + /// be accounted for, but is not directly accessible by users + /// (e.g. attribute sets used for `with`). + pub fn declare_phantom(&mut self, span: codemap::Span, initialised: bool) -> LocalIdx { + let idx = self.locals.len(); + self.locals.push(Local { + initialised, + span, + name: LocalName::Phantom, + depth: self.scope_depth, + needs_finaliser: false, + must_thunk: false, + used: true, + }); + + LocalIdx(idx) + } + + /// Declare an uninitialised, named local variable. + /// + /// Returns the `LocalIdx` of the new local, and optionally the + /// index of a previous local shadowed by this one. + pub fn declare_local( + &mut self, + name: String, + span: codemap::Span, + ) -> (LocalIdx, Option<LocalIdx>) { + let idx = LocalIdx(self.locals.len()); + self.locals.push(Local { + name: LocalName::Ident(name.clone()), + span, + depth: self.scope_depth, + initialised: false, + needs_finaliser: false, + must_thunk: false, + used: false, + }); + + let mut shadowed = None; + match self.by_name.entry(name) { + hash_map::Entry::Occupied(mut entry) => { + let existing = entry.get_mut(); + shadowed = Some(existing.index()); + existing.add_idx(idx); + } + hash_map::Entry::Vacant(entry) => { + entry.insert(ByName::Single(idx)); + } + } + + (idx, shadowed) + } + + /// Mark local as initialised after compiling its expression. 
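+    // For example (illustrative): in `let x = x; in x` the initialiser refers
+    // to `x` before `mark_initialised` has run for it, so `resolve_local`
+    // reports the access as `LocalPosition::Recursive` rather than `Known`.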
+ pub fn mark_initialised(&mut self, idx: LocalIdx) { + self.locals[idx.0].initialised = true; + } + + /// Mark local as needing a finaliser. + pub fn mark_needs_finaliser(&mut self, idx: LocalIdx) { + self.locals[idx.0].needs_finaliser = true; + } + + /// Mark local as must be wrapped in a thunk. This happens if + /// the local has a reference to itself in its upvalues. + pub fn mark_must_thunk(&mut self, idx: LocalIdx) { + self.locals[idx.0].must_thunk = true; + } + + /// Compute the runtime stack index for a given local by + /// accounting for uninitialised variables at scopes below this + /// one. + pub fn stack_index(&self, idx: LocalIdx) -> StackIdx { + let uninitialised_count = self.locals[..(idx.0)] + .iter() + .filter(|l| !l.initialised && self[idx].depth > l.depth) + .count(); + + StackIdx(idx.0 - uninitialised_count) + } + + /// Increase the current scope depth (e.g. within a new bindings + /// block, or `with`-scope). + pub fn begin_scope(&mut self) { + self.scope_depth += 1; + } + + /// Decrease the scope depth and remove all locals still tracked + /// for the current scope. + /// + /// Returns the count of locals that were dropped while marked as + /// initialised (used by the compiler to determine whether to emit + /// scope cleanup operations), as well as the spans of the + /// definitions of unused locals (used by the compiler to emit + /// unused binding warnings). + pub fn end_scope(&mut self) -> (usize, Vec<codemap::Span>) { + debug_assert!(self.scope_depth != 0, "can not end top scope"); + + let mut pops = 0; + let mut unused_spans = vec![]; + + // TL;DR - iterate from the back while things belonging to the + // ended scope still exist. + while self.locals.last().unwrap().depth == self.scope_depth { + if let Some(local) = self.locals.pop() { + // pop the local from the stack if it was actually + // initialised + if local.initialised { + pops += 1; + } + + // analyse whether the local was accessed during its + // lifetime, and emit a warning otherwise (unless the + // user explicitly chose to ignore it by prefixing the + // identifier with `_`) + if !local.used && !local.is_ignored() { + unused_spans.push(local.span); + } + + // remove the by-name index if this was a named local + if let LocalName::Ident(name) = local.name { + if let hash_map::Entry::Occupied(mut entry) = self.by_name.entry(name) { + // If no removal occured through `remove_idx` + // (i.e. there was no shadowing going on), + // nuke the whole entry. + if !entry.get_mut().remove_idx() { + entry.remove(); + } + } + } + } + } + + self.scope_depth -= 1; + + (pops, unused_spans) + } + + /// Access the current scope depth. + pub fn scope_depth(&self) -> usize { + self.scope_depth + } +} diff --git a/tvix/eval/src/errors.rs b/tvix/eval/src/errors.rs new file mode 100644 index 000000000000..2fbb6496ceea --- /dev/null +++ b/tvix/eval/src/errors.rs @@ -0,0 +1,1054 @@ +use std::error; +use std::io; +use std::path::PathBuf; +use std::rc::Rc; +use std::str::Utf8Error; +use std::string::FromUtf8Error; +use std::sync::Arc; +use std::{fmt::Debug, fmt::Display, num::ParseIntError}; + +use codemap::{File, Span}; +use codemap_diagnostic::{ColorConfig, Diagnostic, Emitter, Level, SpanLabel, SpanStyle}; +use smol_str::SmolStr; +use xml::writer::Error as XmlError; + +use crate::spans::ToSpan; +use crate::value::{CoercionKind, NixString}; +use crate::{SourceCode, Value}; + +#[derive(Clone, Debug)] +pub enum ErrorKind { + /// These are user-generated errors through builtins. 
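+    // e.g. (illustrative): `builtins.throw "msg"` surfaces as `Throw("msg")`,
+    // `builtins.abort` as `Abort(..)`, and a failed `assert` as `AssertionFailed`.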
+    Throw(String),
+    Abort(String),
+    AssertionFailed,
+
+    DivisionByZero,
+
+    DuplicateAttrsKey {
+        key: String,
+    },
+
+    /// Attempted to specify an invalid key type (e.g. integer) in a
+    /// dynamic attribute name.
+    InvalidAttributeName(Value),
+
+    AttributeNotFound {
+        name: String,
+    },
+
+    /// Attempted to index into a list beyond its boundaries.
+    IndexOutOfBounds {
+        index: i64,
+    },
+
+    /// Attempted to call `builtins.tail` on an empty list.
+    TailEmptyList,
+
+    TypeError {
+        expected: &'static str,
+        actual: &'static str,
+    },
+
+    Incomparable {
+        lhs: &'static str,
+        rhs: &'static str,
+    },
+
+    /// Resolving a user-supplied angle brackets path literal failed in some way.
+    NixPathResolution(String),
+
+    /// Resolving a user-supplied relative or home-relative path literal failed in some way.
+    RelativePathResolution(String),
+
+    /// Dynamic keys are not allowed in some scopes.
+    DynamicKeyInScope(&'static str),
+
+    /// Unknown variable in statically known scope.
+    UnknownStaticVariable,
+
+    /// Unknown variable in dynamic scope (with, rec, ...).
+    UnknownDynamicVariable(String),
+
+    /// User is defining the same variable twice at the same depth.
+    VariableAlreadyDefined(Span),
+
+    /// Attempt to call something that is not callable.
+    NotCallable(&'static str),
+
+    /// Infinite recursion encountered while forcing thunks.
+    InfiniteRecursion {
+        first_force: Span,
+        suspended_at: Option<Span>,
+        content_span: Option<Span>,
+    },
+
+    ParseErrors(Vec<rnix::parser::ParseError>),
+
+    /// An error occurred while executing some native code (e.g. a
+    /// builtin), and needs to be chained up.
+    NativeError {
+        gen_type: &'static str,
+        err: Box<Error>,
+    },
+
+    /// An error occurred while executing Tvix bytecode, but needs to
+    /// be chained up.
+    BytecodeError(Box<Error>),
+
+    /// Given type can't be coerced to a string in the respective context
+    NotCoercibleToString {
+        from: &'static str,
+        kind: CoercionKind,
+    },
+
+    /// The given string doesn't represent an absolute path
+    NotAnAbsolutePath(PathBuf),
+
+    /// An error occurred when parsing an integer
+    ParseIntError(ParseIntError),
+
+    /// A negative integer was used as a value representing length.
+    NegativeLength {
+        length: i64,
+    },
+
+    // Errors specific to nested attribute sets and merges thereof.
+    /// Nested attributes can not be merged with an inherited value.
+    UnmergeableInherit {
+        name: SmolStr,
+    },
+
+    /// Nested attributes can not be merged with values that are not
+    /// literal attribute sets.
+    UnmergeableValue,
+
+    /// Parse errors occurred while importing a file.
+    ImportParseError {
+        path: PathBuf,
+        file: Arc<File>,
+        errors: Vec<rnix::parser::ParseError>,
+    },
+
+    /// Compilation errors occurred while importing a file.
+    ImportCompilerError {
+        path: PathBuf,
+        errors: Vec<Error>,
+    },
+
+    /// I/O errors
+    IO {
+        path: Option<PathBuf>,
+        error: Rc<io::Error>,
+    },
+
+    /// Errors converting JSON to a value
+    FromJsonError(String),
+
+    /// Nix value that can not be serialised to JSON.
+    NotSerialisableToJson(&'static str),
+
+    /// Errors converting TOML to a value
+    FromTomlError(String),
+
+    /// An unexpected argument was supplied to a function that takes formal parameters
+    UnexpectedArgument {
+        arg: NixString,
+        formals_span: Span,
+    },
+
+    /// Errors while serialising to XML.
+    Xml(Rc<XmlError>),
+
+    /// Variant for errors that bubble up to eval from other Tvix
+    /// components.
+ TvixError(Rc<dyn error::Error>), + + /// Variant for code paths that are known bugs in Tvix (usually + /// issues with the compiler/VM interaction). + TvixBug { + msg: &'static str, + metadata: Option<Rc<dyn Debug>>, + }, + + /// Tvix internal warning for features triggered by users that are + /// not actually implemented yet, and without which eval can not + /// proceed. + NotImplemented(&'static str), + + /// Internal variant which should disappear during error construction. + WithContext { + context: String, + underlying: Box<ErrorKind>, + }, +} + +impl error::Error for Error { + fn source(&self) -> Option<&(dyn error::Error + 'static)> { + match &self.kind { + ErrorKind::NativeError { err, .. } | ErrorKind::BytecodeError(err) => err.source(), + ErrorKind::ParseErrors(err) => err.first().map(|e| e as &dyn error::Error), + ErrorKind::ParseIntError(err) => Some(err), + ErrorKind::ImportParseError { errors, .. } => { + errors.first().map(|e| e as &dyn error::Error) + } + ErrorKind::ImportCompilerError { errors, .. } => { + errors.first().map(|e| e as &dyn error::Error) + } + ErrorKind::IO { error, .. } => Some(error.as_ref()), + ErrorKind::Xml(error) => Some(error.as_ref()), + ErrorKind::TvixError(error) => Some(error.as_ref()), + _ => None, + } + } +} + +impl From<ParseIntError> for ErrorKind { + fn from(e: ParseIntError) -> Self { + Self::ParseIntError(e) + } +} + +impl From<Utf8Error> for ErrorKind { + fn from(_: Utf8Error) -> Self { + Self::NotImplemented("FromUtf8Error not handled: https://b.tvl.fyi/issues/189") + } +} + +impl From<FromUtf8Error> for ErrorKind { + fn from(_: FromUtf8Error) -> Self { + Self::NotImplemented("FromUtf8Error not handled: https://b.tvl.fyi/issues/189") + } +} + +impl From<XmlError> for ErrorKind { + fn from(err: XmlError) -> Self { + Self::Xml(Rc::new(err)) + } +} + +impl From<io::Error> for ErrorKind { + fn from(e: io::Error) -> Self { + ErrorKind::IO { + path: None, + error: Rc::new(e), + } + } +} + +impl ErrorKind { + /// Returns `true` if this error can be caught by `builtins.tryEval` + pub fn is_catchable(&self) -> bool { + match self { + Self::Throw(_) | Self::AssertionFailed | Self::NixPathResolution(_) => true, + Self::NativeError { err, .. 
} | Self::BytecodeError(err) => err.kind.is_catchable(), + _ => false, + } + } +} + +impl From<serde_json::Error> for ErrorKind { + fn from(err: serde_json::Error) -> Self { + // Can't just put the `serde_json::Error` in the ErrorKind since it doesn't impl `Clone` + Self::FromJsonError(format!("error in JSON serialization: {err}")) + } +} + +impl From<toml::de::Error> for ErrorKind { + fn from(err: toml::de::Error) -> Self { + Self::FromTomlError(format!("error in TOML serialization: {err}")) + } +} + +#[derive(Clone, Debug)] +pub struct Error { + pub kind: ErrorKind, + pub span: Span, + pub contexts: Vec<String>, +} + +impl Error { + pub fn new(mut kind: ErrorKind, span: Span) -> Self { + let mut contexts = vec![]; + while let ErrorKind::WithContext { + context, + underlying, + } = kind + { + kind = *underlying; + contexts.push(context); + } + + Error { + kind, + span, + contexts, + } + } +} + +impl Display for ErrorKind { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match &self { + ErrorKind::Throw(msg) => write!(f, "error thrown: {}", msg), + ErrorKind::Abort(msg) => write!(f, "evaluation aborted: {}", msg), + ErrorKind::AssertionFailed => write!(f, "assertion failed"), + + ErrorKind::DivisionByZero => write!(f, "division by zero"), + + ErrorKind::DuplicateAttrsKey { key } => { + write!(f, "attribute key '{}' already defined", key) + } + + ErrorKind::InvalidAttributeName(val) => write!( + f, + "found attribute name '{}' of type '{}', but attribute names must be strings", + val, + val.type_of() + ), + + ErrorKind::AttributeNotFound { name } => write!( + f, + "attribute with name '{}' could not be found in the set", + name + ), + + ErrorKind::IndexOutOfBounds { index } => { + write!(f, "list index '{}' is out of bounds", index) + } + + ErrorKind::TailEmptyList => write!(f, "'tail' called on an empty list"), + + ErrorKind::TypeError { expected, actual } => write!( + f, + "expected value of type '{}', but found a '{}'", + expected, actual + ), + + ErrorKind::Incomparable { lhs, rhs } => { + write!(f, "can not compare a {} with a {}", lhs, rhs) + } + + ErrorKind::NixPathResolution(err) | ErrorKind::RelativePathResolution(err) => { + write!(f, "could not resolve path: {}", err) + } + + ErrorKind::DynamicKeyInScope(scope) => { + write!(f, "dynamically evaluated keys are not allowed in {}", scope) + } + + ErrorKind::UnknownStaticVariable => write!(f, "variable not found"), + + ErrorKind::UnknownDynamicVariable(name) => write!( + f, + r#"variable '{}' could not be found + +Note that this occured within a `with`-expression. The problem may be related +to a missing value in the attribute set(s) included via `with`."#, + name + ), + + ErrorKind::VariableAlreadyDefined(_) => write!(f, "variable has already been defined"), + + ErrorKind::NotCallable(other_type) => { + write!( + f, + "only functions and builtins can be called, but this is a '{}'", + other_type + ) + } + + ErrorKind::InfiniteRecursion { .. } => write!(f, "infinite recursion encountered"), + + // Errors themselves ignored here & handled in Self::spans instead + ErrorKind::ParseErrors(_) => write!(f, "failed to parse Nix code:"), + + ErrorKind::NativeError { gen_type, .. 
} => { + write!(f, "while evaluating this as native code ({})", gen_type) + } + + ErrorKind::BytecodeError(_) => write!(f, "while evaluating this Nix code"), + + ErrorKind::NotCoercibleToString { kind, from } => { + let kindly = match kind { + CoercionKind::Strong => "strongly", + CoercionKind::Weak => "weakly", + }; + + let hint = if *from == "set" { + ", missing a `__toString` or `outPath` attribute" + } else { + "" + }; + + write!(f, "cannot ({kindly}) coerce {from} to a string{hint}") + } + + ErrorKind::NotAnAbsolutePath(given) => { + write!( + f, + "string '{}' does not represent an absolute path", + given.to_string_lossy() + ) + } + + ErrorKind::ParseIntError(err) => { + write!(f, "invalid integer: {}", err) + } + + ErrorKind::NegativeLength { length } => { + write!( + f, + "cannot use a negative integer, {}, for a value representing length", + length + ) + } + + ErrorKind::UnmergeableInherit { name } => { + write!( + f, + "cannot merge a nested attribute set into the inherited entry '{}'", + name + ) + } + + ErrorKind::UnmergeableValue => { + write!( + f, + "nested attribute sets or keys can only be merged with literal attribute sets" + ) + } + + // Errors themselves ignored here & handled in Self::spans instead + ErrorKind::ImportParseError { path, .. } => { + write!( + f, + "parse errors occured while importing '{}'", + path.to_string_lossy() + ) + } + + ErrorKind::ImportCompilerError { path, .. } => { + writeln!( + f, + "compiler errors occured while importing '{}'", + path.to_string_lossy() + ) + } + + ErrorKind::IO { path, error } => { + write!(f, "I/O error: ")?; + if let Some(path) = path { + write!(f, "{}: ", path.display())?; + } + write!(f, "{error}") + } + + ErrorKind::FromJsonError(msg) => { + write!(f, "Error converting JSON to a Nix value: {msg}") + } + + ErrorKind::NotSerialisableToJson(_type) => { + write!(f, "a {} cannot be converted to JSON", _type) + } + + ErrorKind::FromTomlError(msg) => { + write!(f, "Error converting TOML to a Nix value: {msg}") + } + + ErrorKind::UnexpectedArgument { arg, .. } => { + write!( + f, + "Unexpected argument `{}` supplied to function", + arg.as_str() + ) + } + + ErrorKind::Xml(error) => write!(f, "failed to serialise to XML: {error}"), + + ErrorKind::TvixError(inner_error) => { + write!(f, "{inner_error}") + } + + ErrorKind::TvixBug { msg, metadata } => { + write!(f, "Tvix bug: {}", msg)?; + + if let Some(metadata) = metadata { + write!(f, "; metadata: {:?}", metadata)?; + } + + Ok(()) + } + + ErrorKind::NotImplemented(feature) => { + write!(f, "feature not yet implemented in Tvix: {}", feature) + } + + ErrorKind::WithContext { .. } => { + panic!("internal ErrorKind::WithContext variant leaked") + } + } + } +} + +impl Display for Error { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.kind) + } +} + +pub type EvalResult<T> = Result<T, Error>; + +/// Human-readable names for rnix syntaxes. 
+fn name_for_syntax(syntax: &rnix::SyntaxKind) -> &'static str { + match syntax { + rnix::SyntaxKind::TOKEN_COMMENT => "a comment", + rnix::SyntaxKind::TOKEN_WHITESPACE => "whitespace", + rnix::SyntaxKind::TOKEN_ASSERT => "`assert`-keyword", + rnix::SyntaxKind::TOKEN_ELSE => "`else`-keyword", + rnix::SyntaxKind::TOKEN_IN => "`in`-keyword", + rnix::SyntaxKind::TOKEN_IF => "`if`-keyword", + rnix::SyntaxKind::TOKEN_INHERIT => "`inherit`-keyword", + rnix::SyntaxKind::TOKEN_LET => "`let`-keyword", + rnix::SyntaxKind::TOKEN_OR => "`or`-keyword", + rnix::SyntaxKind::TOKEN_REC => "`rec`-keyword", + rnix::SyntaxKind::TOKEN_THEN => "`then`-keyword", + rnix::SyntaxKind::TOKEN_WITH => "`with`-keyword", + rnix::SyntaxKind::TOKEN_L_BRACE => "{", + rnix::SyntaxKind::TOKEN_R_BRACE => "}", + rnix::SyntaxKind::TOKEN_L_BRACK => "[", + rnix::SyntaxKind::TOKEN_R_BRACK => "]", + rnix::SyntaxKind::TOKEN_ASSIGN => "=", + rnix::SyntaxKind::TOKEN_AT => "@", + rnix::SyntaxKind::TOKEN_COLON => ":", + rnix::SyntaxKind::TOKEN_COMMA => "`,`", + rnix::SyntaxKind::TOKEN_DOT => ".", + rnix::SyntaxKind::TOKEN_ELLIPSIS => "...", + rnix::SyntaxKind::TOKEN_QUESTION => "?", + rnix::SyntaxKind::TOKEN_SEMICOLON => ";", + rnix::SyntaxKind::TOKEN_L_PAREN => "(", + rnix::SyntaxKind::TOKEN_R_PAREN => ")", + rnix::SyntaxKind::TOKEN_CONCAT => "++", + rnix::SyntaxKind::TOKEN_INVERT => "!", + rnix::SyntaxKind::TOKEN_UPDATE => "//", + rnix::SyntaxKind::TOKEN_ADD => "+", + rnix::SyntaxKind::TOKEN_SUB => "-", + rnix::SyntaxKind::TOKEN_MUL => "*", + rnix::SyntaxKind::TOKEN_DIV => "/", + rnix::SyntaxKind::TOKEN_AND_AND => "&&", + rnix::SyntaxKind::TOKEN_EQUAL => "==", + rnix::SyntaxKind::TOKEN_IMPLICATION => "->", + rnix::SyntaxKind::TOKEN_LESS => "<", + rnix::SyntaxKind::TOKEN_LESS_OR_EQ => "<=", + rnix::SyntaxKind::TOKEN_MORE => ">", + rnix::SyntaxKind::TOKEN_MORE_OR_EQ => ">=", + rnix::SyntaxKind::TOKEN_NOT_EQUAL => "!=", + rnix::SyntaxKind::TOKEN_OR_OR => "||", + rnix::SyntaxKind::TOKEN_FLOAT => "a float", + rnix::SyntaxKind::TOKEN_IDENT => "an identifier", + rnix::SyntaxKind::TOKEN_INTEGER => "an integer", + rnix::SyntaxKind::TOKEN_INTERPOL_END => "}", + rnix::SyntaxKind::TOKEN_INTERPOL_START => "${", + rnix::SyntaxKind::TOKEN_PATH => "a path", + rnix::SyntaxKind::TOKEN_URI => "a literal URI", + rnix::SyntaxKind::TOKEN_STRING_CONTENT => "content of a string", + rnix::SyntaxKind::TOKEN_STRING_END => "\"", + rnix::SyntaxKind::TOKEN_STRING_START => "\"", + + rnix::SyntaxKind::NODE_APPLY => "a function application", + rnix::SyntaxKind::NODE_ASSERT => "an assertion", + rnix::SyntaxKind::NODE_ATTRPATH => "an attribute path", + rnix::SyntaxKind::NODE_DYNAMIC => "a dynamic identifier", + + rnix::SyntaxKind::NODE_IDENT => "an identifier", + rnix::SyntaxKind::NODE_IF_ELSE => "an `if`-expression", + rnix::SyntaxKind::NODE_SELECT => "a `select`-expression", + rnix::SyntaxKind::NODE_INHERIT => "inherited values", + rnix::SyntaxKind::NODE_INHERIT_FROM => "inherited values", + rnix::SyntaxKind::NODE_STRING => "a string", + rnix::SyntaxKind::NODE_INTERPOL => "an interpolation", + rnix::SyntaxKind::NODE_LAMBDA => "a function", + rnix::SyntaxKind::NODE_IDENT_PARAM => "a function parameter", + rnix::SyntaxKind::NODE_LEGACY_LET => "a legacy `let`-expression", + rnix::SyntaxKind::NODE_LET_IN => "a `let`-expression", + rnix::SyntaxKind::NODE_LIST => "a list", + rnix::SyntaxKind::NODE_BIN_OP => "a binary operator", + rnix::SyntaxKind::NODE_PAREN => "a parenthesised expression", + rnix::SyntaxKind::NODE_PATTERN => "a function argument pattern", + 
rnix::SyntaxKind::NODE_PAT_BIND => "an argument pattern binding", + rnix::SyntaxKind::NODE_PAT_ENTRY => "an argument pattern entry", + rnix::SyntaxKind::NODE_ROOT => "a Nix expression", + rnix::SyntaxKind::NODE_ATTR_SET => "an attribute set", + rnix::SyntaxKind::NODE_ATTRPATH_VALUE => "an attribute set entry", + rnix::SyntaxKind::NODE_UNARY_OP => "a unary operator", + rnix::SyntaxKind::NODE_LITERAL => "a literal value", + rnix::SyntaxKind::NODE_WITH => "a `with`-expression", + rnix::SyntaxKind::NODE_PATH => "a path", + rnix::SyntaxKind::NODE_HAS_ATTR => "`?`-operator", + + // TODO(tazjin): unsure what these variants are, lets crash! + rnix::SyntaxKind::NODE_ERROR => todo!("NODE_ERROR found, tell tazjin!"), + rnix::SyntaxKind::TOKEN_ERROR => todo!("TOKEN_ERROR found, tell tazjin!"), + _ => todo!(), + } +} + +/// Construct the string representation for a list of expected parser tokens. +fn expected_syntax(one_of: &[rnix::SyntaxKind]) -> String { + match one_of.len() { + 0 => "nothing".into(), + 1 => format!("'{}'", name_for_syntax(&one_of[0])), + _ => { + let mut out: String = "one of: ".into(); + let end = one_of.len() - 1; + + for (idx, item) in one_of.iter().enumerate() { + if idx != 0 { + out.push_str(", "); + } else if idx == end { + out.push_str(", or "); + }; + + out.push_str(name_for_syntax(item)); + } + + out + } + } +} + +/// Process a list of parse errors into a set of span labels, annotating parse +/// errors. +fn spans_for_parse_errors(file: &File, errors: &[rnix::parser::ParseError]) -> Vec<SpanLabel> { + // rnix has a tendency to emit some identical errors more than once, but + // they do not enhance the user experience necessarily, so we filter them + // out + let mut had_eof = false; + + errors + .iter() + .enumerate() + .filter_map(|(idx, err)| { + let (span, label): (Span, String) = match err { + rnix::parser::ParseError::Unexpected(range) => ( + range.span_for(file), + "found an unexpected syntax element here".into(), + ), + + rnix::parser::ParseError::UnexpectedExtra(range) => ( + range.span_for(file), + "found unexpected extra elements at the root of the expression".into(), + ), + + rnix::parser::ParseError::UnexpectedWanted(found, range, wanted) => { + let span = range.span_for(file); + ( + span, + format!( + "found '{}', but expected {}", + name_for_syntax(found), + expected_syntax(wanted), + ), + ) + } + + rnix::parser::ParseError::UnexpectedEOF => { + if had_eof { + return None; + } + + had_eof = true; + + ( + file.span, + "code ended unexpectedly while the parser still expected more".into(), + ) + } + + rnix::parser::ParseError::UnexpectedEOFWanted(wanted) => { + had_eof = true; + + ( + file.span, + format!( + "code ended unexpectedly, but wanted {}", + expected_syntax(wanted) + ), + ) + } + + rnix::parser::ParseError::DuplicatedArgs(range, name) => ( + range.span_for(file), + format!( + "the function argument pattern '{}' was bound more than once", + name + ), + ), + + rnix::parser::ParseError::RecursionLimitExceeded => ( + file.span, + "this code exceeds the parser's recursion limit, please report a Tvix bug" + .to_string(), + ), + + // TODO: can rnix even still throw this? it's semantic! + rnix::parser::ParseError::UnexpectedDoubleBind(range) => ( + range.span_for(file), + "this pattern was bound more than once".into(), + ), + + // The error enum is marked as `#[non_exhaustive]` in rnix, + // which disables the compiler error for missing a variant. 
This + // feature makes it possible for users to miss critical updates + // of enum variants for a more exciting runtime experience. + new => todo!("new parse error variant: {}", new), + }; + + Some(SpanLabel { + span, + label: Some(label), + style: if idx == 0 { + SpanStyle::Primary + } else { + SpanStyle::Secondary + }, + }) + }) + .collect() +} + +impl Error { + pub fn fancy_format_str(&self, source: &SourceCode) -> String { + let mut out = vec![]; + Emitter::vec(&mut out, Some(&*source.codemap())).emit(&self.diagnostics(source)); + String::from_utf8_lossy(&out).to_string() + } + + /// Render a fancy, human-readable output of this error and print + /// it to stderr. + pub fn fancy_format_stderr(&self, source: &SourceCode) { + Emitter::stderr(ColorConfig::Auto, Some(&*source.codemap())) + .emit(&self.diagnostics(source)); + } + + /// Create the optional span label displayed as an annotation on + /// the underlined span of the error. + fn span_label(&self) -> Option<String> { + let label = match &self.kind { + ErrorKind::DuplicateAttrsKey { .. } => "in this attribute set", + ErrorKind::InvalidAttributeName(_) => "in this attribute set", + ErrorKind::NixPathResolution(_) | ErrorKind::RelativePathResolution(_) => { + "in this path literal" + } + ErrorKind::UnexpectedArgument { .. } => "in this function call", + + // The spans for some errors don't have any more descriptive stuff + // in them, or we don't utilise it yet. + ErrorKind::Throw(_) + | ErrorKind::Abort(_) + | ErrorKind::AssertionFailed + | ErrorKind::AttributeNotFound { .. } + | ErrorKind::IndexOutOfBounds { .. } + | ErrorKind::TailEmptyList + | ErrorKind::TypeError { .. } + | ErrorKind::Incomparable { .. } + | ErrorKind::DivisionByZero + | ErrorKind::DynamicKeyInScope(_) + | ErrorKind::UnknownStaticVariable + | ErrorKind::UnknownDynamicVariable(_) + | ErrorKind::VariableAlreadyDefined(_) + | ErrorKind::NotCallable(_) + | ErrorKind::InfiniteRecursion { .. } + | ErrorKind::ParseErrors(_) + | ErrorKind::NativeError { .. } + | ErrorKind::BytecodeError(_) + | ErrorKind::NotCoercibleToString { .. } + | ErrorKind::NotAnAbsolutePath(_) + | ErrorKind::ParseIntError(_) + | ErrorKind::NegativeLength { .. } + | ErrorKind::UnmergeableInherit { .. } + | ErrorKind::UnmergeableValue + | ErrorKind::ImportParseError { .. } + | ErrorKind::ImportCompilerError { .. } + | ErrorKind::IO { .. } + | ErrorKind::FromJsonError(_) + | ErrorKind::NotSerialisableToJson(_) + | ErrorKind::FromTomlError(_) + | ErrorKind::Xml(_) + | ErrorKind::TvixError(_) + | ErrorKind::TvixBug { .. } + | ErrorKind::NotImplemented(_) + | ErrorKind::WithContext { .. } => return None, + }; + + Some(label.into()) + } + + /// Return the unique error code for this variant which can be + /// used to refer users to documentation. + fn code(&self) -> &'static str { + match self.kind { + ErrorKind::Throw(_) => "E001", + ErrorKind::Abort(_) => "E002", + ErrorKind::AssertionFailed => "E003", + ErrorKind::InvalidAttributeName { .. } => "E004", + ErrorKind::AttributeNotFound { .. } => "E005", + ErrorKind::TypeError { .. } => "E006", + ErrorKind::Incomparable { .. } => "E007", + ErrorKind::NixPathResolution(_) => "E008", + ErrorKind::DynamicKeyInScope(_) => "E009", + ErrorKind::UnknownStaticVariable => "E010", + ErrorKind::UnknownDynamicVariable(_) => "E011", + ErrorKind::VariableAlreadyDefined(_) => "E012", + ErrorKind::NotCallable(_) => "E013", + ErrorKind::InfiniteRecursion { .. } => "E014", + ErrorKind::ParseErrors(_) => "E015", + ErrorKind::DuplicateAttrsKey { .. 
} => "E016", + ErrorKind::NotCoercibleToString { .. } => "E018", + ErrorKind::IndexOutOfBounds { .. } => "E019", + ErrorKind::NotAnAbsolutePath(_) => "E020", + ErrorKind::ParseIntError(_) => "E021", + ErrorKind::NegativeLength { .. } => "E022", + ErrorKind::TailEmptyList { .. } => "E023", + ErrorKind::UnmergeableInherit { .. } => "E024", + ErrorKind::UnmergeableValue => "E025", + ErrorKind::ImportParseError { .. } => "E027", + ErrorKind::ImportCompilerError { .. } => "E028", + ErrorKind::IO { .. } => "E029", + ErrorKind::FromJsonError { .. } => "E030", + ErrorKind::UnexpectedArgument { .. } => "E031", + ErrorKind::RelativePathResolution(_) => "E032", + ErrorKind::DivisionByZero => "E033", + ErrorKind::Xml(_) => "E034", + ErrorKind::FromTomlError(_) => "E035", + ErrorKind::NotSerialisableToJson(_) => "E036", + + // Special error code for errors from other Tvix + // components. We may want to introduce a code namespacing + // system to have these errors pass codes through. + ErrorKind::TvixError(_) => "E997", + + // Special error code that is not part of the normal + // ordering. + ErrorKind::TvixBug { .. } => "E998", + + // Placeholder error while Tvix is under construction. + ErrorKind::NotImplemented(_) => "E999", + + // Chained errors should yield the code of the innermost + // error. + ErrorKind::NativeError { ref err, .. } | ErrorKind::BytecodeError(ref err) => { + err.code() + } + + ErrorKind::WithContext { .. } => { + panic!("internal ErrorKind::WithContext variant leaked") + } + } + } + + fn spans(&self, source: &SourceCode) -> Vec<SpanLabel> { + let mut spans = match &self.kind { + ErrorKind::ImportParseError { errors, file, .. } => { + spans_for_parse_errors(file, errors) + } + + ErrorKind::ParseErrors(errors) => { + let file = source.get_file(self.span); + spans_for_parse_errors(&file, errors) + } + + ErrorKind::UnexpectedArgument { formals_span, .. } => { + vec![ + SpanLabel { + label: self.span_label(), + span: self.span, + style: SpanStyle::Primary, + }, + SpanLabel { + label: Some("the accepted arguments".into()), + span: *formals_span, + style: SpanStyle::Secondary, + }, + ] + } + + ErrorKind::InfiniteRecursion { + first_force, + suspended_at, + content_span, + } => { + let mut spans = vec![]; + + if let Some(content_span) = content_span { + spans.push(SpanLabel { + label: Some("this lazily-evaluated code".into()), + span: *content_span, + style: SpanStyle::Secondary, + }) + } + + if let Some(suspended_at) = suspended_at { + spans.push(SpanLabel { + label: Some("which was instantiated here".into()), + span: *suspended_at, + style: SpanStyle::Secondary, + }) + } + + spans.push(SpanLabel { + label: Some("was first requested to be evaluated here".into()), + span: *first_force, + style: SpanStyle::Secondary, + }); + + spans.push(SpanLabel { + label: Some("but then requested again here during its own evaluation".into()), + span: self.span, + style: SpanStyle::Primary, + }); + + spans + } + + // All other errors pretty much have the same shape. + _ => { + vec![SpanLabel { + label: self.span_label(), + span: self.span, + style: SpanStyle::Primary, + }] + } + }; + + for ctx in &self.contexts { + spans.push(SpanLabel { + label: Some(format!("while {}", ctx)), + span: self.span, + style: SpanStyle::Secondary, + }); + } + + spans + } + + /// Create the primary diagnostic for a given error. 
+ fn diagnostic(&self, source: &SourceCode) -> Diagnostic { + Diagnostic { + level: Level::Error, + message: self.to_string(), + spans: self.spans(source), + code: Some(self.code().into()), + } + } + + /// Return the primary diagnostic and all further associated diagnostics (if + /// any) of an error. + fn diagnostics(&self, source: &SourceCode) -> Vec<Diagnostic> { + match &self.kind { + ErrorKind::ImportCompilerError { errors, .. } => { + let mut out = vec![self.diagnostic(source)]; + out.extend(errors.iter().map(|e| e.diagnostic(source))); + out + } + + // When encountering either of these error kinds, we are dealing + // with the top of an error chain. + // + // An error chain creates a list of diagnostics which provide trace + // information. + // + // We don't know how deep this chain is, so we avoid recursing in + // this function while unrolling the chain. + ErrorKind::NativeError { err: next, .. } | ErrorKind::BytecodeError(next) => { + // Accumulated diagnostics to return. + let mut diagnostics: Vec<Diagnostic> = vec![]; + + // The next (inner) error to add to the diagnostics, after this + // one. + let mut next = *next.clone(); + + // Diagnostic message for *this* error. + let mut this_message = self.to_string(); + + // Primary span for *this* error. + let mut this_span = self.span; + + // Diagnostic spans for *this* error. + let mut this_spans = self.spans(source); + + loop { + if is_new_span( + this_span, + diagnostics.last().and_then(|last| last.spans.last()), + ) { + diagnostics.push(Diagnostic { + level: Level::Note, + message: this_message, + spans: this_spans, + code: None, // only the top-level error has one + }); + } + + this_message = next.to_string(); + this_span = next.span; + this_spans = next.spans(source); + + match next.kind { + ErrorKind::NativeError { err: inner, .. } + | ErrorKind::BytecodeError(inner) => { + next = *inner; + continue; + } + _ => { + diagnostics.extend(next.diagnostics(source)); + break; + } + } + } + + diagnostics + } + + _ => vec![self.diagnostic(source)], + } + } +} + +// Check if this error is in a different span from its immediate ancestor. +fn is_new_span(this_span: Span, parent: Option<&SpanLabel>) -> bool { + match parent { + None => true, + Some(parent) => parent.span != this_span, + } +} + +// Convenience methods to add context on other types. +pub trait AddContext { + /// Add context to the error-carrying type. + fn context<S: Into<String>>(self, ctx: S) -> Self; +} + +impl AddContext for ErrorKind { + fn context<S: Into<String>>(self, ctx: S) -> Self { + ErrorKind::WithContext { + context: ctx.into(), + underlying: Box::new(self), + } + } +} + +impl<T> AddContext for Result<T, ErrorKind> { + fn context<S: Into<String>>(self, ctx: S) -> Self { + self.map_err(|kind| kind.context(ctx)) + } +} + +impl<T> AddContext for Result<T, Error> { + fn context<S: Into<String>>(self, ctx: S) -> Self { + self.map_err(|err| Error { + kind: err.kind.context(ctx), + ..err + }) + } +} diff --git a/tvix/eval/src/io.rs b/tvix/eval/src/io.rs new file mode 100644 index 000000000000..708c36153c47 --- /dev/null +++ b/tvix/eval/src/io.rs @@ -0,0 +1,138 @@ +//! Interface for injecting I/O-related functionality into tvix-eval. +//! +//! The Nix language contains several builtins (e.g. `builtins.readDir`), as +//! well as language feature (e.g. string-"coercion" of paths) that interact +//! with the filesystem. +//! +//! The language evaluator implemented by this crate does not depend on any +//! particular filesystem interaction model. 
Instead, this module provides a +//! trait that can be implemented by tvix-eval callers to provide the +//! functionality they desire. +//! +//! In theory this can be used to implement "mocked" filesystem interactions, or +//! interaction with remote filesystems, etc. +//! +//! In the context of Nix builds, callers also use this interface to determine +//! how store paths are opened and so on. + +use smol_str::SmolStr; +use std::{ + io, + path::{Path, PathBuf}, +}; + +/// Types of files as represented by `builtins.readDir` in Nix. +#[derive(Debug)] +pub enum FileType { + Directory, + Regular, + Symlink, + Unknown, +} + +/// Defines how filesystem interaction occurs inside of tvix-eval. +pub trait EvalIO { + /// Verify whether the file at the specified path exists. + fn path_exists(&self, path: &Path) -> Result<bool, io::Error>; + + /// Read the file at the specified path to a string. + fn read_to_string(&self, path: &Path) -> Result<String, io::Error>; + + /// Read the directory at the specified path and return the names + /// of its entries associated with their [`FileType`]. + fn read_dir(&self, path: &Path) -> Result<Vec<(SmolStr, FileType)>, io::Error>; + + /// Import the given path. What this means depends on the + /// implementation, for example for a `std::io`-based + /// implementation this might be a no-op, while for a Tvix store + /// this might be a copy of the given files to the store. + /// + /// This is primarily used in the context of things like coercing + /// a local path to a string, or builtins like `path`. + fn import_path(&self, path: &Path) -> Result<PathBuf, io::Error>; + + /// Returns the root of the store directory, if such a thing + /// exists in the evaluation context. + fn store_dir(&self) -> Option<String> { + None + } +} + +/// Implementation of [`EvalIO`] that simply uses the equivalent +/// standard library functions, i.e. does local file-IO. +#[cfg(feature = "impure")] +pub struct StdIO; + +#[cfg(feature = "impure")] +impl EvalIO for StdIO { + fn path_exists(&self, path: &Path) -> Result<bool, io::Error> { + path.try_exists() + } + + fn read_to_string(&self, path: &Path) -> Result<String, io::Error> { + std::fs::read_to_string(&path) + } + + fn read_dir(&self, path: &Path) -> Result<Vec<(SmolStr, FileType)>, io::Error> { + let mut result = vec![]; + + for entry in path.read_dir()? { + let entry = entry?; + let file_type = entry.metadata()?.file_type(); + + let val = if file_type.is_dir() { + FileType::Directory + } else if file_type.is_file() { + FileType::Regular + } else if file_type.is_symlink() { + FileType::Symlink + } else { + FileType::Unknown + }; + + result.push((SmolStr::new(entry.file_name().to_string_lossy()), val)); + } + + Ok(result) + } + + // this is a no-op for `std::io`, as the user can already refer to + // the path directly + fn import_path(&self, path: &Path) -> Result<PathBuf, io::Error> { + Ok(path.to_path_buf()) + } +} + +/// Dummy implementation of [`EvalIO`], can be used in contexts where +/// IO is not available but code should "pretend" that it is. 
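+// A custom implementation can be supplied instead of [`StdIO`] or the
+// [`DummyIO`] below. A minimal in-memory sketch (hypothetical, for
+// illustration only) could look like:
+//
+//     struct FixedFileIO(std::collections::HashMap<PathBuf, String>);
+//
+//     impl EvalIO for FixedFileIO {
+//         fn path_exists(&self, path: &Path) -> Result<bool, io::Error> {
+//             Ok(self.0.contains_key(path))
+//         }
+//
+//         fn read_to_string(&self, path: &Path) -> Result<String, io::Error> {
+//             self.0.get(path).cloned().ok_or_else(|| {
+//                 io::Error::new(io::ErrorKind::NotFound, "no such fixed file")
+//             })
+//         }
+//
+//         fn read_dir(&self, _: &Path) -> Result<Vec<(SmolStr, FileType)>, io::Error> {
+//             Ok(vec![])
+//         }
+//
+//         // leave the path unchanged, as `StdIO` does
+//         fn import_path(&self, path: &Path) -> Result<PathBuf, io::Error> {
+//             Ok(path.to_path_buf())
+//         }
+//     }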
+pub struct DummyIO; + +impl EvalIO for DummyIO { + fn path_exists(&self, _: &Path) -> Result<bool, io::Error> { + Err(io::Error::new( + io::ErrorKind::Unsupported, + "I/O methods are not implemented in DummyIO", + )) + } + + fn read_to_string(&self, _: &Path) -> Result<String, io::Error> { + Err(io::Error::new( + io::ErrorKind::Unsupported, + "I/O methods are not implemented in DummyIO", + )) + } + + fn read_dir(&self, _: &Path) -> Result<Vec<(SmolStr, FileType)>, io::Error> { + Err(io::Error::new( + io::ErrorKind::Unsupported, + "I/O methods are not implemented in DummyIO", + )) + } + + fn import_path(&self, _: &Path) -> Result<PathBuf, io::Error> { + Err(io::Error::new( + io::ErrorKind::Unsupported, + "I/O methods are not implemented in DummyIO", + )) + } +} diff --git a/tvix/eval/src/lib.rs b/tvix/eval/src/lib.rs new file mode 100644 index 000000000000..deb48b50078e --- /dev/null +++ b/tvix/eval/src/lib.rs @@ -0,0 +1,346 @@ +//! `tvix-eval` implements the evaluation of the Nix programming language in +//! Tvix. +//! +//! It is designed to allow users to use Nix as a versatile language for +//! different use-cases. +//! +//! This module exports the high-level functions and types needed for evaluating +//! Nix code and interacting with the language's data structures. +//! +//! Nix has several language features that make use of impurities (such as +//! reading from the NIX_PATH environment variable, or interacting with files). +//! These features are optional and the API of this crate exposes functionality +//! for controlling how they work. + +pub mod builtins; +mod chunk; +mod compiler; +mod errors; +mod io; +pub mod observer; +mod opcode; +mod pretty_ast; +mod source; +mod spans; +mod systems; +mod upvalues; +mod value; +mod vm; +mod warnings; + +mod nix_search_path; +#[cfg(test)] +mod properties; +#[cfg(test)] +mod test_utils; +#[cfg(test)] +mod tests; + +use std::path::PathBuf; +use std::rc::Rc; +use std::str::FromStr; +use std::sync::Arc; + +use crate::compiler::GlobalsMap; +use crate::observer::{CompilerObserver, RuntimeObserver}; +use crate::value::Lambda; +use crate::vm::run_lambda; + +// Re-export the public interface used by other crates. +pub use crate::compiler::{compile, prepare_globals, CompilationOutput}; +pub use crate::errors::{AddContext, Error, ErrorKind, EvalResult}; +pub use crate::io::{DummyIO, EvalIO, FileType}; +pub use crate::pretty_ast::pretty_print_expr; +pub use crate::source::SourceCode; +pub use crate::vm::generators; +pub use crate::warnings::{EvalWarning, WarningKind}; +pub use builtin_macros; + +pub use crate::value::{Builtin, CoercionKind, NixAttrs, NixList, NixString, Value}; + +#[cfg(feature = "impure")] +pub use crate::io::StdIO; + +/// An `Evaluation` represents how a piece of Nix code is evaluated. It can be +/// instantiated and configured directly, or it can be accessed through the +/// various simplified helper methods available below. +/// +/// Public fields are intended to be set by the caller. Setting all +/// fields is optional. +pub struct Evaluation<'code, 'co, 'ro> { + /// The Nix source code to be evaluated. + code: &'code str, + + /// Optional location of the source code (i.e. path to the file it was read + /// from). Used for error reporting, and for resolving relative paths in + /// impure functions. + location: Option<PathBuf>, + + /// Source code map used for error reporting. + source_map: SourceCode, + + /// Top-level file reference for this code inside the source map. 
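+    // Typical construction and use (an illustrative sketch; see `evaluate`
+    // below for what actually happens):
+    //
+    //     let eval = Evaluation::new("1 + 2", None);
+    //     let result = eval.evaluate();
+    //     assert!(result.errors.is_empty());
+    //     // `result.value` now contains the evaluated value `3`.
+    //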
+ file: Arc<codemap::File>, + + /// Set of all builtins that should be available during the + /// evaluation. + /// + /// This defaults to all pure builtins. Users might want to add + /// the set of impure builtins, or other custom builtins. + pub builtins: Vec<(&'static str, Value)>, + + /// Set of builtins that are implemented in Nix itself and should + /// be compiled and inserted in the builtins set. + pub src_builtins: Vec<(&'static str, &'static str)>, + + /// Implementation of file-IO to use during evaluation, e.g. for + /// impure builtins. + /// + /// Defaults to [`DummyIO`] if not set explicitly. + pub io_handle: Box<dyn EvalIO>, + + /// Determines whether the `import` builtin should be made + /// available. Note that this depends on the `io_handle` being + /// able to read the files specified as arguments to `import`. + pub enable_import: bool, + + /// Determines whether the returned value should be strictly + /// evaluated, that is whether its list and attribute set elements + /// should be forced recursively. + pub strict: bool, + + /// (optional) Nix search path, e.g. the value of `NIX_PATH` used + /// for resolving items on the search path (such as `<nixpkgs>`). + pub nix_path: Option<String>, + + /// (optional) compiler observer for reporting on compilation + /// details, like the emitted bytecode. + pub compiler_observer: Option<&'co mut dyn CompilerObserver>, + + /// (optional) runtime observer, for reporting on execution steps + /// of Nix code. + pub runtime_observer: Option<&'ro mut dyn RuntimeObserver>, +} + +/// Result of evaluating a piece of Nix code. If evaluation succeeded, a value +/// will be present (and potentially some warnings!). If evaluation failed, +/// errors will be present. +#[derive(Debug, Default)] +pub struct EvaluationResult { + /// Nix value that the code evaluated to. + pub value: Option<Value>, + + /// Errors that occured during evaluation (if any). + pub errors: Vec<Error>, + + /// Warnings that occured during evaluation. Warnings are not critical, but + /// should be addressed either to modernise code or improve performance. + pub warnings: Vec<EvalWarning>, + + /// AST node that was parsed from the code (on success only). + pub expr: Option<rnix::ast::Expr>, +} + +impl<'code, 'co, 'ro> Evaluation<'code, 'co, 'ro> { + /// Initialise an `Evaluation` for the given Nix source code snippet, and + /// an optional code location. + pub fn new(code: &'code str, location: Option<PathBuf>) -> Self { + let source_map = SourceCode::new(); + + let location_str = location + .as_ref() + .map(|p| p.to_string_lossy().to_string()) + .unwrap_or_else(|| "[code]".into()); + + let file = source_map.add_file(location_str, code.into()); + + let mut builtins = builtins::pure_builtins(); + builtins.extend(builtins::placeholders()); // these are temporary + + Evaluation { + code, + location, + source_map, + file, + builtins, + src_builtins: vec![], + io_handle: Box::new(DummyIO {}), + enable_import: false, + strict: false, + nix_path: None, + compiler_observer: None, + runtime_observer: None, + } + } + + #[cfg(feature = "impure")] + /// Initialise an `Evaluation` for the given snippet, with all + /// impure features turned on by default. + pub fn new_impure(code: &'code str, location: Option<PathBuf>) -> Self { + let mut eval = Self::new(code, location); + eval.enable_import = true; + eval.builtins.extend(builtins::impure_builtins()); + eval.io_handle = Box::new(StdIO); + eval + } + + /// Clone the reference to the contained source code map. 
This is used after + /// an evaluation for pretty error printing. + pub fn source_map(&self) -> SourceCode { + self.source_map.clone() + } + + /// Only compile the provided source code. This does not *run* the + /// code, it only provides analysis (errors and warnings) of the + /// compiler. + pub fn compile_only(mut self) -> EvaluationResult { + let mut result = EvaluationResult::default(); + let source = self.source_map(); + + let mut noop_observer = observer::NoOpObserver::default(); + let compiler_observer = self.compiler_observer.take().unwrap_or(&mut noop_observer); + + parse_compile_internal( + &mut result, + self.code, + self.file.clone(), + self.location, + source, + self.builtins, + self.src_builtins, + self.enable_import, + compiler_observer, + ); + + result + } + + /// Evaluate the provided source code. + pub fn evaluate(mut self) -> EvaluationResult { + let mut result = EvaluationResult::default(); + let source = self.source_map(); + + let mut noop_observer = observer::NoOpObserver::default(); + let compiler_observer = self.compiler_observer.take().unwrap_or(&mut noop_observer); + + // Insert a storeDir builtin *iff* a store directory is present. + if let Some(store_dir) = self.io_handle.store_dir() { + self.builtins.push(("storeDir", store_dir.into())); + } + + let (lambda, globals) = match parse_compile_internal( + &mut result, + self.code, + self.file.clone(), + self.location, + source, + self.builtins, + self.src_builtins, + self.enable_import, + compiler_observer, + ) { + None => return result, + Some(cr) => cr, + }; + + // If bytecode was returned, there were no errors and the + // code is safe to execute. + + let nix_path = self + .nix_path + .as_ref() + .and_then(|s| match nix_search_path::NixSearchPath::from_str(s) { + Ok(path) => Some(path), + Err(err) => { + result.warnings.push(EvalWarning { + kind: WarningKind::InvalidNixPath(err.to_string()), + span: self.file.span, + }); + None + } + }) + .unwrap_or_default(); + + let runtime_observer = self.runtime_observer.take().unwrap_or(&mut noop_observer); + + let vm_result = run_lambda( + nix_path, + self.io_handle, + runtime_observer, + globals, + lambda, + self.strict, + ); + + match vm_result { + Ok(mut runtime_result) => { + result.warnings.append(&mut runtime_result.warnings); + result.value = Some(runtime_result.value); + } + Err(err) => { + result.errors.push(err); + } + } + + result + } +} + +/// Internal helper function for common parsing & compilation logic +/// between the public functions. +fn parse_compile_internal( + result: &mut EvaluationResult, + code: &str, + file: Arc<codemap::File>, + location: Option<PathBuf>, + source: SourceCode, + builtins: Vec<(&'static str, Value)>, + src_builtins: Vec<(&'static str, &'static str)>, + enable_import: bool, + compiler_observer: &mut dyn CompilerObserver, +) -> Option<(Rc<Lambda>, Rc<GlobalsMap>)> { + let parsed = rnix::ast::Root::parse(code); + let parse_errors = parsed.errors(); + + if !parse_errors.is_empty() { + result.errors.push(Error::new( + ErrorKind::ParseErrors(parse_errors.to_vec()), + file.span, + )); + return None; + } + + // At this point we know that the code is free of parse errors and + // we can continue to compile it. The expression is persisted in + // the result, in case the caller needs it for something. 
+ result.expr = parsed.tree().expr(); + + let builtins = crate::compiler::prepare_globals(builtins, src_builtins, source, enable_import); + + let compiler_result = match compiler::compile( + result.expr.as_ref().unwrap(), + location, + file, + builtins, + compiler_observer, + ) { + Ok(result) => result, + Err(err) => { + result.errors.push(err); + return None; + } + }; + + result.warnings = compiler_result.warnings; + result.errors.extend(compiler_result.errors); + + // Short-circuit if errors exist at this point (do not pass broken + // bytecode to the runtime). + if !result.errors.is_empty() { + return None; + } + + // Return the lambda (for execution) and the globals map (to + // ensure the invariant that the globals outlive the runtime). + Some((compiler_result.lambda, compiler_result.globals)) +} diff --git a/tvix/eval/src/nix_search_path.rs b/tvix/eval/src/nix_search_path.rs new file mode 100644 index 000000000000..79c19752f6c1 --- /dev/null +++ b/tvix/eval/src/nix_search_path.rs @@ -0,0 +1,243 @@ +use path_clean::PathClean; +use std::convert::Infallible; +use std::path::{Path, PathBuf}; +use std::str::FromStr; + +use crate::errors::ErrorKind; +use crate::EvalIO; + +#[derive(Debug, Clone, PartialEq, Eq)] +enum NixSearchPathEntry { + /// Resolve subdirectories of this path within `<...>` brackets. This + /// corresponds to bare paths within the `NIX_PATH` environment variable + /// + /// For example, with `NixSearchPathEntry::Path("/example")` and the following + /// directory structure: + /// + /// ```notrust + /// example + /// └── subdir + /// └── grandchild + /// ``` + /// + /// A Nix path literal `<subdir>` would resolve to `/example/subdir`, and a + /// Nix path literal `<subdir/grandchild>` would resolve to + /// `/example/subdir/grandchild` + Path(PathBuf), + + /// Resolve paths starting with `prefix` as subdirectories of `path`. This + /// corresponds to `prefix=path` within the `NIX_PATH` environment variable. + /// + /// For example, with `NixSearchPathEntry::Prefix { prefix: "prefix", path: + /// "/example" }` and the following directory structure: + /// + /// ```notrust + /// example + /// └── subdir + /// └── grandchild + /// ``` + /// + /// A Nix path literal `<prefix/subdir>` would resolve to `/example/subdir`, + /// and a Nix path literal `<prefix/subdir/grandchild>` would resolve to + /// `/example/subdir/grandchild` + Prefix { prefix: PathBuf, path: PathBuf }, +} + +fn canonicalise(path: PathBuf) -> Result<PathBuf, ErrorKind> { + let absolute = if path.is_absolute() { + path + } else { + // TODO(tazjin): probably panics in wasm? + std::env::current_dir() + .map_err(|e| ErrorKind::IO { + path: Some(path.clone()), + error: e.into(), + })? + .join(path) + } + .clean(); + + Ok(absolute) +} + +impl NixSearchPathEntry { + /// Determine whether this path entry matches the given lookup path. + /// + /// For bare paths, an entry is considered to match if a matching + /// file exists under it. + /// + /// For prefixed path, an entry matches if the prefix does. + // TODO(tazjin): verify these rules in the C++ impl, seems fishy. + fn resolve( + &self, + io: &mut dyn EvalIO, + lookup_path: &Path, + ) -> Result<Option<PathBuf>, ErrorKind> { + let path = match self { + NixSearchPathEntry::Path(parent) => canonicalise(parent.join(lookup_path))?, + + NixSearchPathEntry::Prefix { prefix, path } => { + if let Ok(child_path) = lookup_path.strip_prefix(prefix) { + canonicalise(path.join(child_path))? 
+ } else { + return Ok(None); + } + } + }; + + if io.path_exists(&path).map_err(|e| ErrorKind::IO { + path: Some(path.clone()), + error: e.into(), + })? { + Ok(Some(path)) + } else { + Ok(None) + } + } +} + +impl FromStr for NixSearchPathEntry { + type Err = Infallible; + + fn from_str(s: &str) -> Result<Self, Self::Err> { + match s.split_once('=') { + Some((prefix, path)) => Ok(Self::Prefix { + prefix: prefix.into(), + path: path.into(), + }), + None => Ok(Self::Path(s.into())), + } + } +} + +/// Struct implementing the format and path resolution rules of the `NIX_PATH` +/// environment variable. +/// +/// This struct can be constructed by parsing a string using the [`FromStr`] +/// impl, or via [`str::parse`]. Nix `<...>` paths can then be resolved using +/// [`NixSearchPath::resolve`]. +#[derive(Default, Debug, Clone, PartialEq, Eq)] +pub struct NixSearchPath { + entries: Vec<NixSearchPathEntry>, +} + +impl NixSearchPath { + /// Attempt to resolve the given `path` within this [`NixSearchPath`] using the + /// path resolution rules for `<...>`-style paths + pub fn resolve<P>(&self, io: &mut dyn EvalIO, path: P) -> Result<PathBuf, ErrorKind> + where + P: AsRef<Path>, + { + let path = path.as_ref(); + for entry in &self.entries { + if let Some(p) = entry.resolve(io, path)? { + return Ok(p); + } + } + Err(ErrorKind::NixPathResolution(format!( + "path '{}' was not found in the Nix search path", + path.display() + ))) + } +} + +impl FromStr for NixSearchPath { + type Err = Infallible; + + fn from_str(s: &str) -> Result<Self, Self::Err> { + let entries = s + .split(':') + .map(|s| s.parse()) + .collect::<Result<Vec<_>, _>>()?; + Ok(NixSearchPath { entries }) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + mod parse { + use super::*; + + #[test] + fn bare_paths() { + assert_eq!( + NixSearchPath::from_str("/foo/bar:/baz").unwrap(), + NixSearchPath { + entries: vec![ + NixSearchPathEntry::Path("/foo/bar".into()), + NixSearchPathEntry::Path("/baz".into()) + ], + } + ); + } + + #[test] + fn mixed_prefix_and_paths() { + assert_eq!( + NixSearchPath::from_str("nixpkgs=/my/nixpkgs:/etc/nixos").unwrap(), + NixSearchPath { + entries: vec![ + NixSearchPathEntry::Prefix { + prefix: "nixpkgs".into(), + path: "/my/nixpkgs".into() + }, + NixSearchPathEntry::Path("/etc/nixos".into()) + ], + } + ); + } + } + + mod resolve { + use crate::StdIO; + use path_clean::PathClean; + use std::env::current_dir; + + use super::*; + + #[test] + fn simple_dir() { + let nix_search_path = NixSearchPath::from_str("./.").unwrap(); + let mut io = StdIO {}; + let res = nix_search_path.resolve(&mut io, "src").unwrap(); + assert_eq!(res, current_dir().unwrap().join("src").clean()); + } + + #[test] + fn failed_resolution() { + let nix_search_path = NixSearchPath::from_str("./.").unwrap(); + let mut io = StdIO {}; + let err = nix_search_path.resolve(&mut io, "nope").unwrap_err(); + assert!( + matches!(err, ErrorKind::NixPathResolution(..)), + "err = {err:?}" + ); + } + + #[test] + fn second_in_path() { + let nix_search_path = NixSearchPath::from_str("./.:/").unwrap(); + let mut io = StdIO {}; + let res = nix_search_path.resolve(&mut io, "etc").unwrap(); + assert_eq!(res, Path::new("/etc")); + } + + #[test] + fn prefix() { + let nix_search_path = NixSearchPath::from_str("/:tvix=.").unwrap(); + let mut io = StdIO {}; + let res = nix_search_path.resolve(&mut io, "tvix/src").unwrap(); + assert_eq!(res, current_dir().unwrap().join("src").clean()); + } + + #[test] + fn matching_prefix() { + let nix_search_path = 
NixSearchPath::from_str("/:tvix=.").unwrap(); + let mut io = StdIO {}; + let res = nix_search_path.resolve(&mut io, "tvix").unwrap(); + assert_eq!(res, current_dir().unwrap().clean()); + } + } +} diff --git a/tvix/eval/src/observer.rs b/tvix/eval/src/observer.rs new file mode 100644 index 000000000000..cdc6ca16ff97 --- /dev/null +++ b/tvix/eval/src/observer.rs @@ -0,0 +1,287 @@ +//! Implements traits for things that wish to observe internal state +//! changes of tvix-eval. +//! +//! This can be used to gain insights from compilation, to trace the +//! runtime, and so on. +//! +//! All methods are optional, that is, observers can implement only +/// what they are interested in observing. +use std::io::Write; +use std::rc::Rc; +use tabwriter::TabWriter; + +use crate::chunk::Chunk; +use crate::generators::VMRequest; +use crate::opcode::{CodeIdx, OpCode}; +use crate::value::Lambda; +use crate::SourceCode; +use crate::Value; + +/// Implemented by types that wish to observe internal happenings of +/// the Tvix compiler. +pub trait CompilerObserver { + /// Called when the compiler finishes compilation of the top-level + /// of an expression (usually the root Nix expression of a file). + fn observe_compiled_toplevel(&mut self, _: &Rc<Lambda>) {} + + /// Called when the compiler finishes compilation of a + /// user-defined function. + /// + /// Note that in Nix there are only single argument functions, so + /// in an expression like `a: b: c: ...` this method will be + /// called three times. + fn observe_compiled_lambda(&mut self, _: &Rc<Lambda>) {} + + /// Called when the compiler finishes compilation of a thunk. + fn observe_compiled_thunk(&mut self, _: &Rc<Lambda>) {} +} + +/// Implemented by types that wish to observe internal happenings of +/// the Tvix virtual machine at runtime. +pub trait RuntimeObserver { + /// Called when the runtime enters a new call frame. + fn observe_enter_call_frame(&mut self, _arg_count: usize, _: &Rc<Lambda>, _call_depth: usize) {} + + /// Called when the runtime exits a call frame. + fn observe_exit_call_frame(&mut self, _frame_at: usize, _stack: &[Value]) {} + + /// Called when the runtime suspends a call frame. + fn observe_suspend_call_frame(&mut self, _frame_at: usize, _stack: &[Value]) {} + + /// Called when the runtime enters a generator frame. + fn observe_enter_generator(&mut self, _frame_at: usize, _name: &str, _stack: &[Value]) {} + + /// Called when the runtime exits a generator frame. + fn observe_exit_generator(&mut self, _frame_at: usize, _name: &str, _stack: &[Value]) {} + + /// Called when the runtime suspends a generator frame. + fn observe_suspend_generator(&mut self, _frame_at: usize, _name: &str, _stack: &[Value]) {} + + /// Called when a generator requests an action from the VM. + fn observe_generator_request(&mut self, _name: &str, _msg: &VMRequest) {} + + /// Called when the runtime replaces the current call frame for a + /// tail call. + fn observe_tail_call(&mut self, _frame_at: usize, _: &Rc<Lambda>) {} + + /// Called when the runtime enters a builtin. + fn observe_enter_builtin(&mut self, _name: &'static str) {} + + /// Called when the runtime exits a builtin. + fn observe_exit_builtin(&mut self, _name: &'static str, _stack: &[Value]) {} + + /// Called when the runtime *begins* executing an instruction. The + /// provided stack is the state at the beginning of the operation. 
+ fn observe_execute_op(&mut self, _ip: CodeIdx, _: &OpCode, _: &[Value]) {} +} + +#[derive(Default)] +pub struct NoOpObserver {} + +impl CompilerObserver for NoOpObserver {} +impl RuntimeObserver for NoOpObserver {} + +/// An observer that prints disassembled chunk information to its +/// internal writer whenwever the compiler emits a toplevel function, +/// closure or thunk. +pub struct DisassemblingObserver<W: Write> { + source: SourceCode, + writer: TabWriter<W>, +} + +impl<W: Write> DisassemblingObserver<W> { + pub fn new(source: SourceCode, writer: W) -> Self { + Self { + source, + writer: TabWriter::new(writer), + } + } + + fn lambda_header(&mut self, kind: &str, lambda: &Rc<Lambda>) { + let _ = writeln!( + &mut self.writer, + "=== compiled {} @ {:p} ({} ops) ===", + kind, + *lambda, + lambda.chunk.code.len() + ); + } + + fn disassemble_chunk(&mut self, chunk: &Chunk) { + // calculate width of the widest address in the chunk + let width = format!("{:#x}", chunk.code.len() - 1).len(); + + for (idx, _) in chunk.code.iter().enumerate() { + let _ = chunk.disassemble_op(&mut self.writer, &self.source, width, CodeIdx(idx)); + } + } +} + +impl<W: Write> CompilerObserver for DisassemblingObserver<W> { + fn observe_compiled_toplevel(&mut self, lambda: &Rc<Lambda>) { + self.lambda_header("toplevel", lambda); + self.disassemble_chunk(&lambda.chunk); + let _ = self.writer.flush(); + } + + fn observe_compiled_lambda(&mut self, lambda: &Rc<Lambda>) { + self.lambda_header("lambda", lambda); + self.disassemble_chunk(&lambda.chunk); + let _ = self.writer.flush(); + } + + fn observe_compiled_thunk(&mut self, lambda: &Rc<Lambda>) { + self.lambda_header("thunk", lambda); + self.disassemble_chunk(&lambda.chunk); + let _ = self.writer.flush(); + } +} + +/// An observer that collects a textual representation of an entire +/// runtime execution. +pub struct TracingObserver<W: Write> { + writer: TabWriter<W>, +} + +impl<W: Write> TracingObserver<W> { + pub fn new(writer: W) -> Self { + Self { + writer: TabWriter::new(writer), + } + } + + fn write_value(&mut self, val: &Value) { + let _ = match val { + // Potentially large types which we only want to print + // the type of (and avoid recursing). + Value::List(l) => write!(&mut self.writer, "list[{}] ", l.len()), + Value::Attrs(a) => write!(&mut self.writer, "attrs[{}] ", a.len()), + Value::Thunk(t) if t.is_evaluated() => Ok(self.write_value(&t.value())), + + // For other value types, defer to the standard value printer. + _ => write!(&mut self.writer, "{} ", val), + }; + } + + fn write_stack(&mut self, stack: &[Value]) { + let _ = write!(&mut self.writer, "[ "); + + // Print out a maximum of 6 values from the top of the stack, + // before abbreviating it to `...`. + for (i, val) in stack.iter().rev().enumerate() { + if i == 6 { + let _ = write!(&mut self.writer, "..."); + break; + } + + self.write_value(&val); + } + + let _ = writeln!(&mut self.writer, "]"); + } +} + +impl<W: Write> RuntimeObserver for TracingObserver<W> { + fn observe_enter_call_frame( + &mut self, + arg_count: usize, + lambda: &Rc<Lambda>, + call_depth: usize, + ) { + let _ = write!(&mut self.writer, "=== entering "); + + let _ = if arg_count == 0 { + write!(&mut self.writer, "thunk ") + } else { + write!(&mut self.writer, "closure ") + }; + + if let Some(name) = &lambda.name { + let _ = write!(&mut self.writer, "'{}' ", name); + } + + let _ = writeln!( + &mut self.writer, + "in frame[{}] @ {:p} ===", + call_depth, *lambda + ); + } + + /// Called when the runtime exits a call frame. 
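+    // Editor's note (illustrative, not part of this change): both observers in
+    // this file are constructed from a `Write` destination (plus the shared
+    // `SourceCode` for disassembly); wiring them up for debugging output might
+    // look like
+    //
+    //     let source = SourceCode::new();
+    //     let mut compiler_observer =
+    //         DisassemblingObserver::new(source.clone(), std::io::stderr());
+    //     let mut runtime_observer = TracingObserver::new(std::io::stderr());
+    //
+    // with the first handed to the compiler and the second to the VM.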
+ fn observe_exit_call_frame(&mut self, frame_at: usize, stack: &[Value]) { + let _ = write!(&mut self.writer, "=== exiting frame {} ===\t ", frame_at); + self.write_stack(stack); + } + + fn observe_suspend_call_frame(&mut self, frame_at: usize, stack: &[Value]) { + let _ = write!(&mut self.writer, "=== suspending frame {} ===\t", frame_at); + + self.write_stack(stack); + } + + fn observe_enter_generator(&mut self, frame_at: usize, name: &str, stack: &[Value]) { + let _ = write!( + &mut self.writer, + "=== entering generator frame '{}' [{}] ===\t", + name, frame_at, + ); + + self.write_stack(stack); + } + + fn observe_exit_generator(&mut self, frame_at: usize, name: &str, stack: &[Value]) { + let _ = write!( + &mut self.writer, + "=== exiting generator '{}' [{}] ===\t", + name, frame_at + ); + + self.write_stack(stack); + } + + fn observe_suspend_generator(&mut self, frame_at: usize, name: &str, stack: &[Value]) { + let _ = write!( + &mut self.writer, + "=== suspending generator '{}' [{}] ===\t", + name, frame_at + ); + + self.write_stack(stack); + } + + fn observe_generator_request(&mut self, name: &str, msg: &VMRequest) { + let _ = writeln!( + &mut self.writer, + "=== generator '{}' requested {} ===", + name, msg + ); + } + + fn observe_enter_builtin(&mut self, name: &'static str) { + let _ = writeln!(&mut self.writer, "=== entering builtin {} ===", name); + } + + fn observe_exit_builtin(&mut self, name: &'static str, stack: &[Value]) { + let _ = write!(&mut self.writer, "=== exiting builtin {} ===\t", name); + self.write_stack(stack); + } + + fn observe_tail_call(&mut self, frame_at: usize, lambda: &Rc<Lambda>) { + let _ = writeln!( + &mut self.writer, + "=== tail-calling {:p} in frame[{}] ===", + *lambda, frame_at + ); + } + + fn observe_execute_op(&mut self, ip: CodeIdx, op: &OpCode, stack: &[Value]) { + let _ = write!(&mut self.writer, "{:04} {:?}\t", ip.0, op); + self.write_stack(stack); + } +} + +impl<W: Write> Drop for TracingObserver<W> { + fn drop(&mut self) { + let _ = self.writer.flush(); + } +} diff --git a/tvix/eval/src/opcode.rs b/tvix/eval/src/opcode.rs new file mode 100644 index 000000000000..130e242668d6 --- /dev/null +++ b/tvix/eval/src/opcode.rs @@ -0,0 +1,186 @@ +//! This module implements the instruction set running on the abstract +//! machine implemented by tvix. + +use std::ops::{AddAssign, Sub}; + +/// Index of a constant in the current code chunk. +#[repr(transparent)] +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +pub struct ConstantIdx(pub usize); + +/// Index of an instruction in the current code chunk. +#[repr(transparent)] +#[derive(Clone, Copy, Debug)] +pub struct CodeIdx(pub usize); + +impl AddAssign<usize> for CodeIdx { + fn add_assign(&mut self, rhs: usize) { + *self = CodeIdx(self.0 + rhs) + } +} + +impl Sub<usize> for CodeIdx { + type Output = Self; + + fn sub(self, rhs: usize) -> Self::Output { + CodeIdx(self.0 - rhs) + } +} + +/// Index of a value in the runtime stack. This is an offset +/// *relative to* the VM value stack_base of the CallFrame +/// containing the opcode which contains this StackIdx. +#[repr(transparent)] +#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd)] +pub struct StackIdx(pub usize); + +/// Index of an upvalue within a closure's bound-variable upvalue +/// list. This is an absolute index into the Upvalues of the +/// CallFrame containing the opcode which contains this UpvalueIdx. 
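+// Editor's sketch (illustrative, not part of this change): these index types
+// are thin `usize` wrappers; `CodeIdx` additionally supports the small amount
+// of arithmetic the VM needs via the `AddAssign` and `Sub` impls above:
+//
+//     let mut ip = CodeIdx(0);
+//     ip += 2;                    // AddAssign<usize>
+//     assert_eq!((ip - 1).0, 1);  // Sub<usize>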
+#[repr(transparent)] +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +pub struct UpvalueIdx(pub usize); + +/// Offset by which an instruction pointer should change in a jump. +#[repr(transparent)] +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +pub struct JumpOffset(pub usize); + +/// Provided count for an instruction (could represent e.g. a number +/// of elements). +#[repr(transparent)] +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +pub struct Count(pub usize); + +/// All variants of this enum carry a bounded amount of data to +/// ensure that no heap allocations are needed for an Opcode. +#[warn(variant_size_differences)] +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +pub enum OpCode { + /// Push a constant onto the stack. + OpConstant(ConstantIdx), + + /// Discard a value from the stack. + OpPop, + + // Unary operators + OpInvert, + OpNegate, + + // Arithmetic binary operators + OpAdd, + OpSub, + OpMul, + OpDiv, + + // Comparison operators + OpEqual, + OpLess, + OpLessOrEq, + OpMore, + OpMoreOrEq, + + // Logical operators & generic jumps + OpJump(JumpOffset), + OpJumpIfTrue(JumpOffset), + OpJumpIfFalse(JumpOffset), + OpJumpIfNotFound(JumpOffset), + + // Attribute sets + /// Construct an attribute set from the given number of key-value pairs on the top of the stack + /// + /// Note that this takes the count of *pairs*, not the number of *stack values* - the actual + /// number of values popped off the stack will be twice the argument to this op + OpAttrs(Count), + OpAttrsUpdate, + OpAttrsSelect, + OpAttrsTrySelect, + OpHasAttr, + + /// Throw an error if the attribute set at the top of the stack has any attributes + /// other than those listed in the formals of the current lambda + /// + /// Panics if the current frame is not a lambda with formals + OpValidateClosedFormals, + + // `with`-handling + OpPushWith(StackIdx), + OpPopWith, + OpResolveWith, + + // Lists + OpList(Count), + OpConcat, + + // Strings + OpInterpolate(Count), + /// Force the Value on the stack and coerce it to a string, always using + /// `CoercionKind::Weak`. + OpCoerceToString, + + // Paths + /// Attempt to resolve the Value on the stack using the configured [`NixSearchPath`][] + /// + /// [`NixSearchPath`]: crate::nix_search_path::NixSearchPath + OpFindFile, + + /// Attempt to resolve a path literal relative to the home dir + OpResolveHomePath, + + // Type assertion operators + OpAssertBool, + + /// Access local identifiers with statically known positions. + OpGetLocal(StackIdx), + + /// Close scopes while leaving their expression value around. + OpCloseScope(Count), // number of locals to pop + + /// Return an error indicating that an `assert` failed + OpAssertFail, + + // Lambdas & closures + OpCall, + OpGetUpvalue(UpvalueIdx), + /// A Closure which has upvalues but no self-references + OpClosure(ConstantIdx), + /// A Closure which has self-references (direct or via upvalues) + OpThunkClosure(ConstantIdx), + /// A suspended thunk, used to ensure laziness + OpThunkSuspended(ConstantIdx), + OpForce, + + /// Finalise initialisation of the upvalues of the value in the given stack + /// index (which must be a Value::Thunk) after the scope is fully bound. + OpFinalise(StackIdx), + + /// Final instruction emitted in a chunk. Does not have an + /// inherent effect, but can simplify VM logic as a marker in some + /// cases. + /// + /// Can be thought of as "returning" the value to the parent + /// frame, hence the name. 
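+    // Editor's note (illustrative, not part of this change): a trivial
+    // expression such as `1 + 2` compiles to roughly the sequence below, with
+    // OpReturn closing out the toplevel chunk:
+    //
+    //     OpConstant(ConstantIdx(0))   // 1
+    //     OpConstant(ConstantIdx(1))   // 2
+    //     OpAdd
+    //     OpReturn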
+ OpReturn, + + // [`OpClosure`], [`OpThunkSuspended`], and [`OpThunkClosure`] have a + // variable number of arguments to the instruction, which is + // represented here by making their data part of the opcodes. + // Each of these two opcodes has a `ConstantIdx`, which must + // reference a `Value::Blueprint(Lambda)`. The `upvalue_count` + // field in that `Lambda` indicates the number of arguments it + // takes, and the opcode must be followed by exactly this number + // of `Data*` opcodes. The VM skips over these by advancing the + // instruction pointer. + // + // It is illegal for a `Data*` opcode to appear anywhere else. + /// Populate a static upvalue by copying from the stack immediately. + DataStackIdx(StackIdx), + /// Populate a static upvalue of a thunk by copying it the stack, but do + /// when the thunk is finalised (by OpFinalise) rather than immediately. + DataDeferredLocal(StackIdx), + /// Populate a static upvalue by copying it from the upvalues of an + /// enclosing scope. + DataUpvalueIdx(UpvalueIdx), + /// Populate dynamic upvalues by saving a copy of the with-stack. + DataCaptureWith, +} diff --git a/tvix/eval/src/pretty_ast.rs b/tvix/eval/src/pretty_ast.rs new file mode 100644 index 000000000000..5ac115e21c89 --- /dev/null +++ b/tvix/eval/src/pretty_ast.rs @@ -0,0 +1,468 @@ +//! Pretty-printed format for the rnix AST representation. +//! +//! The AST is serialised into a JSON structure that can then be +//! printed in either minimised or well-formatted style. + +use rnix::ast::{self, AstToken, HasEntry}; +use serde::{ser::SerializeMap, Serialize, Serializer}; + +pub fn pretty_print_expr(expr: &ast::Expr) -> String { + serde_json::ser::to_string_pretty(&SerializeAST(expr)) + .expect("serializing AST should always succeed") +} + +#[repr(transparent)] +struct SerializeAST<S>(S); + +impl<'a> Serialize for SerializeAST<&'a ast::Apply> { + fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> { + let mut map = serializer.serialize_map(Some(3))?; + map.serialize_entry("kind", "apply")?; + map.serialize_entry("fn", &SerializeAST(&self.0.lambda().unwrap()))?; + map.serialize_entry("arg", &SerializeAST(&self.0.argument().unwrap()))?; + map.end() + } +} + +impl<'a> Serialize for SerializeAST<&'a ast::Assert> { + fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> { + let mut map = serializer.serialize_map(Some(3))?; + map.serialize_entry("kind", "assert")?; + map.serialize_entry("condition", &SerializeAST(&self.0.condition().unwrap()))?; + map.serialize_entry("body", &SerializeAST(&self.0.body().unwrap()))?; + map.end() + } +} + +impl<'a> Serialize for SerializeAST<&'a ast::Error> { + fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> { + let mut map = serializer.serialize_map(Some(2))?; + map.serialize_entry("kind", "error")?; + map.serialize_entry("node", &self.0.to_string())?; + map.end() + } +} + +impl<'a> Serialize for SerializeAST<&'a ast::IfElse> { + fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> { + let mut map = serializer.serialize_map(Some(4))?; + map.serialize_entry("kind", "if_else")?; + map.serialize_entry("condition", &SerializeAST(&self.0.condition().unwrap()))?; + map.serialize_entry("then_body", &SerializeAST(&self.0.body().unwrap()))?; + map.serialize_entry("else_body", &SerializeAST(&self.0.else_body().unwrap()))?; + map.end() + } +} + +impl<'a> Serialize for SerializeAST<&'a ast::Select> { + fn serialize<S: Serializer>(&self, serializer: S) -> 
Result<S::Ok, S::Error> { + let size = match self.0.default_expr() { + Some(_) => 4, + None => 3, + }; + + let mut map = serializer.serialize_map(Some(size))?; + map.serialize_entry("kind", "select")?; + map.serialize_entry("set", &SerializeAST(&self.0.expr().unwrap()))?; + map.serialize_entry("path", &SerializeAST(self.0.attrpath().unwrap()))?; + + if let Some(default) = self.0.default_expr() { + map.serialize_entry("default", &SerializeAST(&default))?; + } + + map.end() + } +} + +impl Serialize for SerializeAST<ast::InterpolPart<String>> { + fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> { + match &self.0 { + ast::InterpolPart::Literal(s) => Serialize::serialize(s, serializer), + ast::InterpolPart::Interpolation(node) => { + Serialize::serialize(&SerializeAST(&node.expr().unwrap()), serializer) + } + } + } +} + +impl<'a> Serialize for SerializeAST<&'a ast::Str> { + fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> { + let mut map = serializer.serialize_map(Some(2))?; + map.serialize_entry("kind", "string")?; + + map.serialize_entry( + "parts", + &self + .0 + .normalized_parts() + .into_iter() + .map(SerializeAST) + .collect::<Vec<_>>(), + )?; + + map.end() + } +} + +impl Serialize for SerializeAST<ast::InterpolPart<ast::PathContent>> { + fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> { + match &self.0 { + ast::InterpolPart::Literal(p) => Serialize::serialize(p.syntax().text(), serializer), + ast::InterpolPart::Interpolation(node) => { + Serialize::serialize(&SerializeAST(&node.expr().unwrap()), serializer) + } + } + } +} + +impl<'a> Serialize for SerializeAST<&'a ast::Path> { + fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> { + let mut map = serializer.serialize_map(Some(2))?; + map.serialize_entry("kind", "path")?; + + map.serialize_entry( + "parts", + &self.0.parts().map(SerializeAST).collect::<Vec<_>>(), + )?; + + map.end() + } +} + +impl<'a> Serialize for SerializeAST<&'a ast::Literal> { + fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> { + let mut map = serializer.serialize_map(Some(2))?; + map.serialize_entry("kind", "literal")?; + + match self.0.kind() { + ast::LiteralKind::Float(val) => map.serialize_entry("float", &val.value().unwrap()), + ast::LiteralKind::Integer(val) => map.serialize_entry("int", &val.value().unwrap()), + ast::LiteralKind::Uri(val) => map.serialize_entry("uri", val.syntax().text()), + }?; + + map.end() + } +} + +impl Serialize for SerializeAST<ast::PatEntry> { + fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> { + let mut map = serializer.serialize_map(None)?; + map.serialize_entry("ident", &SerializeAST(&self.0.ident().unwrap()))?; + + if let Some(default) = self.0.default() { + map.serialize_entry("default", &SerializeAST(&default))?; + } + + map.end() + } +} + +impl Serialize for SerializeAST<ast::Param> { + fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> { + match &self.0 { + ast::Param::Pattern(pat) => { + let mut map = serializer.serialize_map(None)?; + map.serialize_entry("kind", "formals")?; + + map.serialize_entry( + "entries", + &pat.pat_entries().map(SerializeAST).collect::<Vec<_>>(), + )?; + + if let Some(bind) = pat.pat_bind() { + map.serialize_entry("bind", &SerializeAST(&bind.ident().unwrap()))?; + } + + map.serialize_entry("ellipsis", &pat.ellipsis_token().is_some())?; + + map.end() + } + + ast::Param::IdentParam(node) => { + 
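+                // Editor's note (illustrative, not part of this change): a bare
+                // identifier parameter serialises as a plain ident node, so the
+                // param of `x: x` becomes roughly {"kind":"ident","ident":"x"},
+                // whereas pattern parameters produce the "formals" map above.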
Serialize::serialize(&SerializeAST(&node.ident().unwrap()), serializer) + } + } + } +} + +impl<'a> Serialize for SerializeAST<&'a ast::Lambda> { + fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> { + let mut map = serializer.serialize_map(Some(3))?; + map.serialize_entry("kind", "lambda")?; + map.serialize_entry("param", &SerializeAST(self.0.param().unwrap()))?; + map.serialize_entry("body", &SerializeAST(self.0.body().unwrap()))?; + map.end() + } +} + +impl<'a> Serialize for SerializeAST<&'a ast::LegacyLet> { + fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> { + let mut map = serializer.serialize_map(Some(3))?; + map.serialize_entry("kind", "legacy_let")?; + + map.serialize_entry( + "entries", + &self + .0 + .attrpath_values() + .map(SerializeAST) + .collect::<Vec<_>>(), + )?; + + map.serialize_entry( + "inherits", + &self.0.inherits().map(SerializeAST).collect::<Vec<_>>(), + )?; + + map.end() + } +} + +impl<'a> Serialize for SerializeAST<&'a ast::LetIn> { + fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> { + let mut map = serializer.serialize_map(Some(3))?; + map.serialize_entry("kind", "let")?; + + map.serialize_entry( + "entries", + &self + .0 + .attrpath_values() + .map(SerializeAST) + .collect::<Vec<_>>(), + )?; + + map.serialize_entry( + "inherits", + &self.0.inherits().map(SerializeAST).collect::<Vec<_>>(), + )?; + + map.serialize_entry("body", &SerializeAST(&self.0.body().unwrap()))?; + map.end() + } +} + +impl<'a> Serialize for SerializeAST<&'a ast::List> { + fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> { + let list = self.0.items().map(SerializeAST).collect::<Vec<_>>(); + + let mut map = serializer.serialize_map(Some(2))?; + map.serialize_entry("kind", "list")?; + map.serialize_entry("items", &list)?; + + map.end() + } +} + +impl<'a> Serialize for SerializeAST<&'a ast::BinOp> { + fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> { + let mut map = serializer.serialize_map(Some(4))?; + map.serialize_entry("kind", "binary_op")?; + + map.serialize_entry( + "operator", + match self.0.operator().unwrap() { + ast::BinOpKind::Concat => "concat", + ast::BinOpKind::Update => "update", + ast::BinOpKind::Add => "add", + ast::BinOpKind::Sub => "sub", + ast::BinOpKind::Mul => "mul", + ast::BinOpKind::Div => "div", + ast::BinOpKind::And => "and", + ast::BinOpKind::Equal => "equal", + ast::BinOpKind::Implication => "implication", + ast::BinOpKind::Less => "less", + ast::BinOpKind::LessOrEq => "less_or_eq", + ast::BinOpKind::More => "more", + ast::BinOpKind::MoreOrEq => "more_or_eq", + ast::BinOpKind::NotEqual => "not_equal", + ast::BinOpKind::Or => "or", + }, + )?; + + map.serialize_entry("lhs", &SerializeAST(&self.0.lhs().unwrap()))?; + map.serialize_entry("rhs", &SerializeAST(&self.0.rhs().unwrap()))?; + map.end() + } +} + +impl<'a> Serialize for SerializeAST<&'a ast::Paren> { + fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> { + let mut map = serializer.serialize_map(Some(2))?; + map.serialize_entry("kind", "paren")?; + map.serialize_entry("expr", &SerializeAST(&self.0.expr().unwrap()))?; + map.end() + } +} + +impl<'a> Serialize for SerializeAST<&'a ast::Root> { + fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> { + let mut map = serializer.serialize_map(Some(2))?; + map.serialize_entry("kind", "root")?; + map.serialize_entry("expr", &SerializeAST(&self.0.expr().unwrap()))?; + map.end() + } +} 
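+// Editor's sketch (illustrative, not part of this change): these impls compose
+// through `pretty_print_expr`; assuming the `rnix::ast::Root::parse` entry
+// point used elsewhere in this crate, dumping a small expression looks like
+//
+//     let parsed = rnix::ast::Root::parse("[ 1 2 ]");
+//     let expr = parsed.tree().expr().unwrap();
+//     println!("{}", pretty_print_expr(&expr));
+//     // => roughly {"kind":"list","items":[{"kind":"literal","int":1}, ...]}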
+ +impl Serialize for SerializeAST<ast::AttrpathValue> { + fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> { + let mut map = serializer.serialize_map(Some(2))?; + map.serialize_entry("name", &SerializeAST(self.0.attrpath().unwrap()))?; + map.serialize_entry("value", &SerializeAST(self.0.value().unwrap()))?; + map.end() + } +} + +impl Serialize for SerializeAST<ast::Inherit> { + fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> { + let mut map = serializer.serialize_map(None)?; + + if let Some(from) = self.0.from() { + map.serialize_entry("namespace", &SerializeAST(&from.expr().unwrap()))?; + } + + map.serialize_entry( + "names", + &self.0.attrs().map(SerializeAST).collect::<Vec<_>>(), + )?; + + map.end() + } +} + +impl<'a> Serialize for SerializeAST<&'a ast::AttrSet> { + fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> { + let mut map = serializer.serialize_map(None)?; + map.serialize_entry("kind", "attrset")?; + map.serialize_entry("recursive", &self.0.rec_token().is_some())?; + + map.serialize_entry( + "entries", + &self + .0 + .attrpath_values() + .map(SerializeAST) + .collect::<Vec<_>>(), + )?; + + map.serialize_entry( + "inherits", + &self.0.inherits().map(SerializeAST).collect::<Vec<_>>(), + )?; + + map.end() + } +} + +impl<'a> Serialize for SerializeAST<&'a ast::UnaryOp> { + fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> { + let mut map = serializer.serialize_map(Some(3))?; + map.serialize_entry("kind", "unary_op")?; + + map.serialize_entry( + "operator", + match self.0.operator().unwrap() { + ast::UnaryOpKind::Invert => "invert", + ast::UnaryOpKind::Negate => "negate", + }, + )?; + + map.serialize_entry("expr", &SerializeAST(&self.0.expr().unwrap()))?; + map.end() + } +} + +impl<'a> Serialize for SerializeAST<&'a ast::Ident> { + fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> { + let mut map = serializer.serialize_map(Some(2))?; + map.serialize_entry("kind", "ident")?; + map.serialize_entry("ident", self.0.ident_token().unwrap().text())?; + map.end() + } +} + +impl<'a> Serialize for SerializeAST<&'a ast::With> { + fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> { + let mut map = serializer.serialize_map(Some(3))?; + map.serialize_entry("kind", "with")?; + map.serialize_entry("with", &SerializeAST(&self.0.namespace().unwrap()))?; + map.serialize_entry("body", &SerializeAST(&self.0.body().unwrap()))?; + map.end() + } +} + +impl<'a> Serialize for SerializeAST<&'a ast::Dynamic> { + fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> { + let mut map = serializer.serialize_map(Some(2))?; + map.serialize_entry("kind", "dynamic")?; + map.serialize_entry("expr", &SerializeAST(&self.0.expr().unwrap()))?; + map.end() + } +} + +impl Serialize for SerializeAST<ast::Attr> { + fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> { + match &self.0 { + ast::Attr::Ident(ident) => Serialize::serialize(&SerializeAST(ident), serializer), + ast::Attr::Dynamic(node) => Serialize::serialize(&SerializeAST(node), serializer), + ast::Attr::Str(node) => Serialize::serialize(&SerializeAST(node), serializer), + } + } +} + +impl Serialize for SerializeAST<ast::Attrpath> { + fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> { + let mut map = serializer.serialize_map(Some(2))?; + map.serialize_entry("kind", "attrpath")?; + + map.serialize_entry( + "path", + 
&self.0.attrs().map(SerializeAST).collect::<Vec<_>>(), + )?; + + map.end() + } +} + +impl<'a> Serialize for SerializeAST<&'a ast::HasAttr> { + fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> { + let mut map = serializer.serialize_map(Some(3))?; + map.serialize_entry("kind", "has_attr")?; + map.serialize_entry("expr", &SerializeAST(&self.0.expr().unwrap()))?; + map.serialize_entry("attrpath", &SerializeAST(self.0.attrpath().unwrap()))?; + map.end() + } +} + +impl<'a> Serialize for SerializeAST<&'a ast::Expr> { + fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> { + match self.0 { + ast::Expr::Apply(node) => Serialize::serialize(&SerializeAST(node), serializer), + ast::Expr::Assert(node) => Serialize::serialize(&SerializeAST(node), serializer), + ast::Expr::Error(node) => Serialize::serialize(&SerializeAST(node), serializer), + ast::Expr::IfElse(node) => Serialize::serialize(&SerializeAST(node), serializer), + ast::Expr::Select(node) => Serialize::serialize(&SerializeAST(node), serializer), + ast::Expr::Str(node) => Serialize::serialize(&SerializeAST(node), serializer), + ast::Expr::Path(node) => Serialize::serialize(&SerializeAST(node), serializer), + ast::Expr::Literal(node) => Serialize::serialize(&SerializeAST(node), serializer), + ast::Expr::Lambda(node) => Serialize::serialize(&SerializeAST(node), serializer), + ast::Expr::LegacyLet(node) => Serialize::serialize(&SerializeAST(node), serializer), + ast::Expr::LetIn(node) => Serialize::serialize(&SerializeAST(node), serializer), + ast::Expr::List(node) => Serialize::serialize(&SerializeAST(node), serializer), + ast::Expr::BinOp(node) => Serialize::serialize(&SerializeAST(node), serializer), + ast::Expr::Paren(node) => Serialize::serialize(&SerializeAST(node), serializer), + ast::Expr::Root(node) => Serialize::serialize(&SerializeAST(node), serializer), + ast::Expr::AttrSet(node) => Serialize::serialize(&SerializeAST(node), serializer), + ast::Expr::UnaryOp(node) => Serialize::serialize(&SerializeAST(node), serializer), + ast::Expr::Ident(node) => Serialize::serialize(&SerializeAST(node), serializer), + ast::Expr::With(node) => Serialize::serialize(&SerializeAST(node), serializer), + ast::Expr::HasAttr(node) => Serialize::serialize(&SerializeAST(node), serializer), + } + } +} + +impl Serialize for SerializeAST<ast::Expr> { + fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> { + SerializeAST(&self.0).serialize(serializer) + } +} diff --git a/tvix/eval/src/properties.rs b/tvix/eval/src/properties.rs new file mode 100644 index 000000000000..45c1cdfce911 --- /dev/null +++ b/tvix/eval/src/properties.rs @@ -0,0 +1,164 @@ +//! Macros that generate proptest test suites checking laws of stdlib traits + +/// Generate a suite of tests to check the laws of the [`Eq`] impl for the given type +macro_rules! 
eq_laws { + ($ty: ty) => { + eq_laws!( + #[strategy(::proptest::arbitrary::any::<$ty>())] + $ty, + Default::default() + ); + }; + ($ty: ty, $config: expr) => { + eq_laws!( + #[strategy(::proptest::arbitrary::any::<$ty>())] + $ty, + $config + ); + }; + (#[$meta: meta] $ty: ty, $config: expr) => { + #[allow(clippy::eq_op)] + mod eq { + use test_strategy::proptest; + + use super::*; + + #[proptest($config)] + fn reflexive(#[$meta] x: $ty) { + assert!(x == x); + } + + #[proptest($config)] + fn symmetric(#[$meta] x: $ty, #[$meta] y: $ty) { + assert_eq!(x == y, y == x); + } + + #[proptest($config)] + fn transitive(#[$meta] x: $ty, #[$meta] y: $ty, #[$meta] z: $ty) { + if x == y && y == z { + assert!(x == z); + } + } + } + }; +} + +/// Generate a suite of tests to check the laws of the [`Ord`] impl for the given type +macro_rules! ord_laws { + ($ty: ty) => { + ord_laws!( + #[strategy(::proptest::arbitrary::any::<$ty>())] + $ty, + Default::default() + ); + }; + ($ty: ty, $config: expr) => { + ord_laws!( + #[strategy(::proptest::arbitrary::any::<$ty>())] + $ty, + $config + ); + }; + (#[$meta: meta] $ty: ty, $config: expr) => { + mod ord { + use test_strategy::proptest; + + use super::*; + + #[proptest($config)] + fn partial_cmp_matches_cmp(#[$meta] x: $ty, #[$meta] y: $ty) { + assert_eq!(x.partial_cmp(&y), Some(x.cmp(&y))); + } + + #[proptest($config)] + fn dual(#[$meta] x: $ty, #[$meta] y: $ty) { + if x < y { + assert!(y > x); + } + if y < x { + assert!(x > y); + } + } + + #[proptest($config)] + fn le_transitive(#[$meta] x: $ty, #[$meta] y: $ty, #[$meta] z: $ty) { + if x < y && y < z { + assert!(x < z) + } + } + + #[proptest($config)] + fn gt_transitive(#[$meta] x: $ty, #[$meta] y: $ty, #[$meta] z: $ty) { + if x > y && y > z { + assert!(x > z) + } + } + + #[proptest($config)] + fn trichotomy(#[$meta] x: $ty, #[$meta] y: $ty) { + let less = x < y; + let greater = x > y; + let eq = x == y; + + if less { + assert!(!greater); + assert!(!eq); + } + + if greater { + assert!(!less); + assert!(!eq); + } + + if eq { + assert!(!less); + assert!(!greater); + } + } + } + }; +} + +/// Generate a test to check the laws of the [`Hash`] impl for the given type +macro_rules! hash_laws { + ($ty: ty) => { + hash_laws!( + #[strategy(::proptest::arbitrary::any::<$ty>())] + $ty, + Default::default() + ); + }; + ($ty: ty, $config: expr) => { + hash_laws!( + #[strategy(::proptest::arbitrary::any::<$ty>())] + $ty, + $config + ); + }; + (#[$meta: meta] $ty: ty, $config: expr) => { + mod hash { + use test_strategy::proptest; + + use super::*; + + #[proptest($config)] + fn matches_eq(#[$meta] x: $ty, #[$meta] y: $ty) { + let hash = |x: &$ty| { + use std::hash::Hasher; + + let mut hasher = ::std::collections::hash_map::DefaultHasher::new(); + x.hash(&mut hasher); + hasher.finish() + }; + + if x == y { + assert_eq!(hash(&x), hash(&y)); + } + } + } + }; +} + +pub(crate) use eq_laws; +pub(crate) use hash_laws; +pub(crate) use ord_laws; diff --git a/tvix/eval/src/source.rs b/tvix/eval/src/source.rs new file mode 100644 index 000000000000..649679536080 --- /dev/null +++ b/tvix/eval/src/source.rs @@ -0,0 +1,63 @@ +//! This module contains utilities for dealing with the codemap that +//! needs to be carried across different compiler instantiations in an +//! evaluation. +//! +//! The data type `SourceCode` should be carried through all relevant +//! places instead of copying the codemap structures directly. 
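+// Editor's sketch (illustrative, not part of this change): typical usage adds
+// each piece of source text once and keeps the cheaply clonable handle around
+// for later span lookups:
+//
+//     let source = SourceCode::new();
+//     let file = source.add_file("repl-input".into(), "1 + 1".into());
+//     // subspans of `file.span` can later be resolved via
+//     // source.get_line(span) and source.source_slice(span).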
+ +use std::{ + cell::{Ref, RefCell, RefMut}, + rc::Rc, + sync::Arc, +}; + +use codemap::{CodeMap, Span}; + +/// Tracks all source code in a Tvix evaluation for accurate error +/// reporting. +#[derive(Clone, Debug)] +pub struct SourceCode(Rc<RefCell<CodeMap>>); + +impl SourceCode { + /// Create a new SourceCode instance. + pub fn new() -> Self { + SourceCode(Rc::new(RefCell::new(CodeMap::new()))) + } + + /// Access a read-only reference to the codemap. + pub fn codemap(&self) -> Ref<CodeMap> { + self.0.borrow() + } + + /// Access a writable reference to the codemap. + fn codemap_mut(&self) -> RefMut<CodeMap> { + self.0.borrow_mut() + } + + /// Add a file to the codemap. The returned Arc is managed by the + /// codemap internally and can be used like a normal reference. + pub fn add_file(&self, name: String, code: String) -> Arc<codemap::File> { + self.codemap_mut().add_file(name, code) + } + + /// Retrieve the line number of the given span. If it spans + /// multiple lines, the first line will be returned. + pub fn get_line(&self, span: Span) -> usize { + // lines are 0-indexed in the codemap, but users probably want + // real line numbers + self.codemap().look_up_span(span).begin.line + 1 + } + + /// Returns the literal source slice of the given span. + pub fn source_slice(&self, span: Span) -> Ref<str> { + Ref::map(self.codemap(), |c| { + c.find_file(span.low()).source_slice(span) + }) + } + + /// Returns the reference to the file structure that a given span + /// is in. + pub fn get_file(&self, span: Span) -> Arc<codemap::File> { + self.codemap().look_up_span(span).file + } +} diff --git a/tvix/eval/src/spans.rs b/tvix/eval/src/spans.rs new file mode 100644 index 000000000000..c0130a665428 --- /dev/null +++ b/tvix/eval/src/spans.rs @@ -0,0 +1,121 @@ +//! Utilities for dealing with span tracking in the compiler and in +//! error reporting. + +use crate::opcode::CodeIdx; +use crate::value::Lambda; +use codemap::{File, Span}; +use rnix::ast; +use rowan::ast::AstNode; +use std::rc::Rc; + +/// Helper struct to carry information required for making a span, but +/// without actually performing the (expensive) span lookup. +/// +/// This is used for tracking spans across thunk boundaries, as they +/// are frequently instantiated but spans are only used in error or +/// warning cases. +#[derive(Clone, Debug)] +pub enum LightSpan { + /// The span has already been computed and can just be used right + /// away. + Actual { span: Span }, + + /// The span needs to be computed from the provided data, but only + /// when it is required. + Delayed { lambda: Rc<Lambda>, offset: CodeIdx }, +} + +impl LightSpan { + pub fn new_delayed(lambda: Rc<Lambda>, offset: CodeIdx) -> Self { + Self::Delayed { lambda, offset } + } + + pub fn new_actual(span: Span) -> Self { + Self::Actual { span } + } + + pub fn span(&self) -> Span { + match self { + LightSpan::Actual { span } => *span, + LightSpan::Delayed { lambda, offset } => lambda.chunk.get_span(*offset), + } + } +} + +impl From<Span> for LightSpan { + fn from(span: Span) -> Self { + LightSpan::Actual { span } + } +} + +/// Trait implemented by all types from which we can retrieve a span. 
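+// Editor's sketch (illustrative, not part of this change): a delayed span is
+// only materialised when an error or warning actually needs it; given some
+// compiled `lambda: Rc<Lambda>` (hypothetical here), usage looks like
+//
+//     let light = LightSpan::new_delayed(lambda.clone(), CodeIdx(3));
+//     let span: Span = light.span(); // resolves via lambda.chunk.get_span(CodeIdx(3))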
+pub trait ToSpan { + fn span_for(&self, file: &File) -> Span; +} + +impl ToSpan for Span { + fn span_for(&self, _: &File) -> Span { + *self + } +} + +impl ToSpan for rnix::TextRange { + fn span_for(&self, file: &File) -> Span { + file.span + .subspan(u32::from(self.start()) as u64, u32::from(self.end()) as u64) + } +} + +impl ToSpan for rnix::SyntaxToken { + fn span_for(&self, file: &File) -> Span { + self.text_range().span_for(file) + } +} + +impl ToSpan for rnix::SyntaxNode { + fn span_for(&self, file: &File) -> Span { + self.text_range().span_for(file) + } +} + +/// Generates a `ToSpan` implementation for a type implementing +/// `rowan::AstNode`. This is impossible to do as a blanket +/// implementation because `rustc` forbids these implementations for +/// traits from third-party crates due to a belief that semantic +/// versioning truly could work (it doesn't). +macro_rules! expr_to_span { + ( $type:path ) => { + impl ToSpan for $type { + fn span_for(&self, file: &File) -> Span { + self.syntax().span_for(file) + } + } + }; +} + +expr_to_span!(ast::Expr); +expr_to_span!(ast::Apply); +expr_to_span!(ast::Assert); +expr_to_span!(ast::Attr); +expr_to_span!(ast::AttrSet); +expr_to_span!(ast::Attrpath); +expr_to_span!(ast::AttrpathValue); +expr_to_span!(ast::BinOp); +expr_to_span!(ast::HasAttr); +expr_to_span!(ast::Ident); +expr_to_span!(ast::IdentParam); +expr_to_span!(ast::IfElse); +expr_to_span!(ast::Inherit); +expr_to_span!(ast::Interpol); +expr_to_span!(ast::Lambda); +expr_to_span!(ast::LegacyLet); +expr_to_span!(ast::LetIn); +expr_to_span!(ast::List); +expr_to_span!(ast::Literal); +expr_to_span!(ast::PatBind); +expr_to_span!(ast::Path); +expr_to_span!(ast::Pattern); +expr_to_span!(ast::Select); +expr_to_span!(ast::Str); +expr_to_span!(ast::UnaryOp); +expr_to_span!(ast::With); diff --git a/tvix/eval/src/systems.rs b/tvix/eval/src/systems.rs new file mode 100644 index 000000000000..16386cb9e0ad --- /dev/null +++ b/tvix/eval/src/systems.rs @@ -0,0 +1,351 @@ +/// true iff the argument is recognized by cppnix as the second +/// coordinate of a "nix double" +fn is_second_coordinate(x: &str) -> bool { + matches!(x, "linux" | "darwin" | "netbsd" | "openbsd" | "freebsd") +} + +/// This function takes an llvm triple (which may have three or four +/// components, separated by dashes) and returns the "best" +/// approximation as a nix double, where "best" is currently defined +/// as "however cppnix handles it". +pub fn llvm_triple_to_nix_double(llvm_triple: &str) -> String { + let parts: Vec<&str> = llvm_triple.split('-').collect(); + let cpu = match parts[0] { + "armv6" => "armv6l", // cppnix appends an "l" to armv6 + "armv7" => "armv7l", // cppnix appends an "l" to armv7 + x => match x.as_bytes() { + [b'i', _, b'8', b'6'] => "i686", // cppnix glob-matches against i*86 + _ => x, + }, + }; + let os = match parts[1..] { + [_vendor, kernel, _environment] if is_second_coordinate(kernel) => kernel, + [_vendor, kernel] if is_second_coordinate(kernel) => kernel, + [kernel, _environment] if is_second_coordinate(kernel) => kernel, + + // Rustc uses wasm32-unknown-unknown, which is rejected by + // config.sub, for wasm-in-the-browser environments. Rustc + // should be using wasm32-unknown-none, which config.sub + // accepts. Hopefully the rustc people will change their + // triple before stabilising this triple. In the meantime, + // we fix it here in order to unbreak tvixbolt. 
+ // + // https://doc.rust-lang.org/beta/nightly-rustc/rustc_target/spec/wasm32_unknown_unknown/index.html + ["unknown", "unknown"] if cpu == "wasm32" => "none", + + _ => panic!("unrecognized triple {llvm_triple}"), + }; + format!("{cpu}-{os}") +} + +#[cfg(test)] +mod tests { + use super::*; + #[test] + fn test_systems() { + assert_eq!( + llvm_triple_to_nix_double("aarch64-unknown-linux-gnu"), + "aarch64-linux" + ); + assert_eq!( + llvm_triple_to_nix_double("i686-unknown-linux-gnu"), + "i686-linux" + ); + assert_eq!( + llvm_triple_to_nix_double("x86_64-apple-darwin"), + "x86_64-darwin" + ); + assert_eq!( + llvm_triple_to_nix_double("x86_64-unknown-linux-gnu"), + "x86_64-linux" + ); + assert_eq!( + llvm_triple_to_nix_double("aarch64-apple-darwin"), + "aarch64-darwin" + ); + assert_eq!( + llvm_triple_to_nix_double("aarch64-unknown-linux-musl"), + "aarch64-linux" + ); + assert_eq!( + llvm_triple_to_nix_double("arm-unknown-linux-gnueabi"), + "arm-linux" + ); + assert_eq!( + llvm_triple_to_nix_double("arm-unknown-linux-gnueabihf"), + "arm-linux" + ); + assert_eq!( + llvm_triple_to_nix_double("armv7-unknown-linux-gnueabihf"), + "armv7l-linux" + ); + assert_eq!( + llvm_triple_to_nix_double("mips-unknown-linux-gnu"), + "mips-linux" + ); + assert_eq!( + llvm_triple_to_nix_double("mips64-unknown-linux-gnuabi64"), + "mips64-linux" + ); + assert_eq!( + llvm_triple_to_nix_double("mips64-unknown-linux-gnuabin32"), + "mips64-linux" + ); + assert_eq!( + llvm_triple_to_nix_double("mips64el-unknown-linux-gnuabi64"), + "mips64el-linux" + ); + assert_eq!( + llvm_triple_to_nix_double("mips64el-unknown-linux-gnuabin32"), + "mips64el-linux" + ); + assert_eq!( + llvm_triple_to_nix_double("mipsel-unknown-linux-gnu"), + "mipsel-linux" + ); + assert_eq!( + llvm_triple_to_nix_double("powerpc-unknown-linux-gnu"), + "powerpc-linux" + ); + assert_eq!( + llvm_triple_to_nix_double("powerpc64-unknown-linux-gnu"), + "powerpc64-linux" + ); + assert_eq!( + llvm_triple_to_nix_double("powerpc64le-unknown-linux-gnu"), + "powerpc64le-linux" + ); + assert_eq!( + llvm_triple_to_nix_double("s390x-unknown-linux-gnu"), + "s390x-linux" + ); + assert_eq!( + llvm_triple_to_nix_double("x86_64-unknown-linux-musl"), + "x86_64-linux" + ); + assert_eq!( + llvm_triple_to_nix_double("x86_64-unknown-netbsd"), + "x86_64-netbsd" + ); + assert_eq!( + llvm_triple_to_nix_double("aarch64-linux-android"), + "aarch64-linux" + ); + assert_eq!( + llvm_triple_to_nix_double("arm-linux-androideabi"), + "arm-linux" + ); + assert_eq!( + llvm_triple_to_nix_double("arm-unknown-linux-musleabi"), + "arm-linux" + ); + assert_eq!( + llvm_triple_to_nix_double("arm-unknown-linux-musleabihf"), + "arm-linux" + ); + assert_eq!( + llvm_triple_to_nix_double("armv5te-unknown-linux-gnueabi"), + "armv5te-linux" + ); + assert_eq!( + llvm_triple_to_nix_double("armv5te-unknown-linux-musleabi"), + "armv5te-linux" + ); + assert_eq!( + llvm_triple_to_nix_double("armv7-linux-androideabi"), + "armv7l-linux" + ); + assert_eq!( + llvm_triple_to_nix_double("armv7-unknown-linux-gnueabi"), + "armv7l-linux" + ); + assert_eq!( + llvm_triple_to_nix_double("armv7-unknown-linux-musleabi"), + "armv7l-linux" + ); + assert_eq!( + llvm_triple_to_nix_double("armv7-unknown-linux-musleabihf"), + "armv7l-linux" + ); + assert_eq!( + llvm_triple_to_nix_double("i586-unknown-linux-gnu"), + "i686-linux" + ); + assert_eq!( + llvm_triple_to_nix_double("i586-unknown-linux-musl"), + "i686-linux" + ); + assert_eq!( + llvm_triple_to_nix_double("i686-linux-android"), + "i686-linux" + ); + assert_eq!( + 
llvm_triple_to_nix_double("i686-unknown-linux-musl"), + "i686-linux" + ); + assert_eq!( + llvm_triple_to_nix_double("mips-unknown-linux-musl"), + "mips-linux" + ); + assert_eq!( + llvm_triple_to_nix_double("mips64-unknown-linux-muslabi64"), + "mips64-linux" + ); + assert_eq!( + llvm_triple_to_nix_double("mips64el-unknown-linux-muslabi64"), + "mips64el-linux" + ); + assert_eq!( + llvm_triple_to_nix_double("mipsel-unknown-linux-musl"), + "mipsel-linux" + ); + assert_eq!( + llvm_triple_to_nix_double("sparc64-unknown-linux-gnu"), + "sparc64-linux" + ); + assert_eq!( + llvm_triple_to_nix_double("thumbv7neon-linux-androideabi"), + "thumbv7neon-linux" + ); + assert_eq!( + llvm_triple_to_nix_double("thumbv7neon-unknown-linux-gnueabihf"), + "thumbv7neon-linux" + ); + assert_eq!( + llvm_triple_to_nix_double("x86_64-linux-android"), + "x86_64-linux" + ); + assert_eq!( + llvm_triple_to_nix_double("x86_64-unknown-linux-gnux32"), + "x86_64-linux" + ); + assert_eq!( + llvm_triple_to_nix_double("aarch64-unknown-linux-gnu_ilp32"), + "aarch64-linux" + ); + assert_eq!( + llvm_triple_to_nix_double("aarch64-unknown-netbsd"), + "aarch64-netbsd" + ); + assert_eq!( + llvm_triple_to_nix_double("aarch64_be-unknown-linux-gnu_ilp32"), + "aarch64_be-linux" + ); + assert_eq!( + llvm_triple_to_nix_double("aarch64_be-unknown-linux-gnu"), + "aarch64_be-linux" + ); + assert_eq!( + llvm_triple_to_nix_double("armeb-unknown-linux-gnueabi"), + "armeb-linux" + ); + assert_eq!( + llvm_triple_to_nix_double("armv4t-unknown-linux-gnueabi"), + "armv4t-linux" + ); + assert_eq!( + llvm_triple_to_nix_double("armv6-unknown-netbsd-eabihf"), + "armv6l-netbsd" + ); + assert_eq!( + llvm_triple_to_nix_double("armv7-unknown-linux-uclibceabi"), + "armv7l-linux" + ); + assert_eq!( + llvm_triple_to_nix_double("armv7-unknown-linux-uclibceabihf"), + "armv7l-linux" + ); + assert_eq!( + llvm_triple_to_nix_double("armv7-unknown-netbsd-eabihf"), + "armv7l-netbsd" + ); + assert_eq!( + llvm_triple_to_nix_double("hexagon-unknown-linux-musl"), + "hexagon-linux" + ); + assert_eq!( + llvm_triple_to_nix_double("i686-unknown-netbsd"), + "i686-netbsd" + ); + assert_eq!( + llvm_triple_to_nix_double("m68k-unknown-linux-gnu"), + "m68k-linux" + ); + assert_eq!( + llvm_triple_to_nix_double("mips-unknown-linux-uclibc"), + "mips-linux" + ); + assert_eq!( + llvm_triple_to_nix_double("mips64-openwrt-linux-musl"), + "mips64-linux" + ); + assert_eq!( + llvm_triple_to_nix_double("mipsel-unknown-linux-uclibc"), + "mipsel-linux" + ); + assert_eq!( + llvm_triple_to_nix_double("mipsisa32r6-unknown-linux-gnu"), + "mipsisa32r6-linux" + ); + assert_eq!( + llvm_triple_to_nix_double("mipsisa32r6el-unknown-linux-gnu"), + "mipsisa32r6el-linux" + ); + assert_eq!( + llvm_triple_to_nix_double("mipsisa64r6-unknown-linux-gnuabi64"), + "mipsisa64r6-linux" + ); + assert_eq!( + llvm_triple_to_nix_double("mipsisa64r6el-unknown-linux-gnuabi64"), + "mipsisa64r6el-linux" + ); + assert_eq!( + llvm_triple_to_nix_double("powerpc-unknown-linux-gnuspe"), + "powerpc-linux" + ); + assert_eq!( + llvm_triple_to_nix_double("powerpc-unknown-linux-musl"), + "powerpc-linux" + ); + assert_eq!( + llvm_triple_to_nix_double("powerpc-unknown-netbsd"), + "powerpc-netbsd" + ); + assert_eq!( + llvm_triple_to_nix_double("powerpc64-unknown-linux-musl"), + "powerpc64-linux" + ); + assert_eq!( + llvm_triple_to_nix_double("powerpc64le-unknown-linux-musl"), + "powerpc64le-linux" + ); + assert_eq!( + llvm_triple_to_nix_double("riscv32gc-unknown-linux-gnu"), + "riscv32gc-linux" + ); + assert_eq!( + 
llvm_triple_to_nix_double("riscv32gc-unknown-linux-musl"), + "riscv32gc-linux" + ); + assert_eq!( + llvm_triple_to_nix_double("riscv64gc-unknown-linux-musl"), + "riscv64gc-linux" + ); + assert_eq!( + llvm_triple_to_nix_double("s390x-unknown-linux-musl"), + "s390x-linux" + ); + assert_eq!( + llvm_triple_to_nix_double("sparc-unknown-linux-gnu"), + "sparc-linux" + ); + assert_eq!( + llvm_triple_to_nix_double("sparc64-unknown-netbsd"), + "sparc64-netbsd" + ); + assert_eq!( + llvm_triple_to_nix_double("thumbv7neon-unknown-linux-musleabihf"), + "thumbv7neon-linux" + ); + } +} diff --git a/tvix/eval/src/test_utils.rs b/tvix/eval/src/test_utils.rs new file mode 100644 index 000000000000..a7d1c3f96899 --- /dev/null +++ b/tvix/eval/src/test_utils.rs @@ -0,0 +1,8 @@ +use codemap::CodeMap; + +/// Create a dummy [`codemap::Span`] for use in tests +pub(crate) fn dummy_span() -> codemap::Span { + let mut codemap = CodeMap::new(); + let file = codemap.add_file("<dummy>".to_owned(), "<dummy>".to_owned()); + file.span +} diff --git a/tvix/eval/src/tests/mod.rs b/tvix/eval/src/tests/mod.rs new file mode 100644 index 000000000000..f800baf05018 --- /dev/null +++ b/tvix/eval/src/tests/mod.rs @@ -0,0 +1,181 @@ +use builtin_macros::builtins; +use pretty_assertions::assert_eq; +use test_generator::test_resources; + +/// Module for one-off tests which do not follow the rest of the +/// test layout. +mod one_offs; + +#[builtins] +mod mock_builtins { + //! Builtins which are required by language tests, but should not + //! actually exist in //tvix/eval. + use crate::generators::GenCo; + use crate::*; + use genawaiter::rc::Gen; + + #[builtin("derivation")] + async fn builtin_derivation(co: GenCo, input: Value) -> Result<Value, ErrorKind> { + let input = input.to_attrs()?; + let attrs = input.update(NixAttrs::from_iter( + [ + ( + "outPath", + "/nix/store/00000000000000000000000000000000-mock", + ), + ( + "drvPath", + "/nix/store/00000000000000000000000000000000-mock.drv", + ), + ("type", "derivation"), + ] + .into_iter(), + )); + + Ok(Value::Attrs(Box::new(attrs))) + } +} + +fn eval_test(code_path: &str, expect_success: bool) { + let base = code_path + .strip_suffix("nix") + .expect("test files always end in .nix"); + let exp_path = format!("{}exp", base); + let exp_xml_path = std::path::PathBuf::from(format!("{}exp.xml", base)); + + let code = std::fs::read_to_string(code_path).expect("should be able to read test code"); + + if exp_xml_path.exists() { + // We can't test them at the moment because we don't have XML output yet. + // Checking for success / failure only is a bit disingenious. + return; + } + + let mut eval = crate::Evaluation::new_impure(&code, Some(code_path.into())); + eval.strict = true; + eval.builtins.extend(mock_builtins::builtins()); + + let result = eval.evaluate(); + + if expect_success && !result.errors.is_empty() { + panic!( + "{code_path}: evaluation of eval-okay test should succeed, but failed with {:?}", + result.errors, + ); + } + + if !expect_success && !result.errors.is_empty() { + return; + } + + let result_str = result.value.unwrap().to_string(); + + if let Ok(exp) = std::fs::read_to_string(exp_path) { + if expect_success { + assert_eq!( + result_str, + exp.trim(), + "{code_path}: result value representation (left) must match expectation (right)" + ); + } else { + assert_ne!( + result_str, + exp.trim(), + "{code_path}: test passed unexpectedly! 
consider moving it out of notyetpassing" + ); + } + } else if expect_success { + panic!("{code_path}: should be able to read test expectation"); + } else { + panic!( + "{code_path}: test should have failed, but succeeded with output {}", + result_str + ); + } +} + +// identity-* tests contain Nix code snippets which should evaluate to +// themselves exactly (i.e. literals). +#[test_resources("src/tests/tvix_tests/identity-*.nix")] +fn identity(code_path: &str) { + let code = std::fs::read_to_string(code_path).expect("should be able to read test code"); + + let mut eval = crate::Evaluation::new(&code, None); + eval.strict = true; + eval.io_handle = Box::new(crate::StdIO); + + let result = eval.evaluate(); + assert!( + result.errors.is_empty(), + "evaluation of identity test failed: {:?}", + result.errors + ); + + let result_str = result.value.unwrap().to_string(); + + assert_eq!( + result_str, + code.trim(), + "result value representation (left) must match expectation (right)" + ) +} + +// eval-okay-* tests contain a snippet of Nix code, and an expectation +// of the produced string output of the evaluator. +// +// These evaluations are always supposed to succeed, i.e. all snippets +// are guaranteed to be valid Nix code. +#[test_resources("src/tests/tvix_tests/eval-okay-*.nix")] +fn eval_okay(code_path: &str) { + eval_test(code_path, true) +} + +// eval-okay-* tests from the original Nix test suite. +#[cfg(feature = "nix_tests")] +#[test_resources("src/tests/nix_tests/eval-okay-*.nix")] +fn nix_eval_okay(code_path: &str) { + eval_test(code_path, true) +} + +// eval-okay-* tests from the original Nix test suite which do not yet pass for tvix +// +// Eventually there will be none of these left, and this function +// will disappear :) Until then, to run these tests, use `cargo test +// --features expected_failures`. +// +// Please don't submit failing tests unless they're in +// notyetpassing; this makes the test suite much more useful for +// regression testing, since there should always be zero non-ignored +// failing tests. +// +// Unfortunately test_generator is unmaintained, so the PRs to make +// it understand #[ignored] has been sitting for two years, so we +// can't use `cargo test --include-ignored`, which is the normal way +// of handling this situation. +// +// https://github.com/frehberg/test-generator/pull/10 +// https://github.com/frehberg/test-generator/pull/8 +#[test_resources("src/tests/nix_tests/notyetpassing/eval-okay-*.nix")] +fn nix_eval_okay_currently_failing(code_path: &str) { + eval_test(code_path, false) +} + +#[test_resources("src/tests/tvix_tests/notyetpassing/eval-okay-*.nix")] +fn eval_okay_currently_failing(code_path: &str) { + eval_test(code_path, false) +} + +// eval-fail-* tests contain a snippet of Nix code, which is +// expected to fail evaluation. The exact type of failure +// (assertion, parse error, etc) is not currently checked. +#[test_resources("src/tests/tvix_tests/eval-fail-*.nix")] +fn eval_fail(code_path: &str) { + eval_test(code_path, false) +} + +// eval-fail-* tests from the original Nix test suite. 
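+// Editor's note (illustrative, not part of this change): all of these
+// generators share the file-naming convention handled by eval_test above;
+// e.g. a minimal passing pair under src/tests/tvix_tests/ would be
+//
+//     eval-okay-add.nix    containing    1 + 2
+//     eval-okay-add.exp    containing    3
+//
+// eval_test reads the .nix file, evaluates it strictly, and compares the
+// printed value against the trimmed contents of the .exp file.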
+#[cfg(feature = "nix_tests")] +#[test_resources("src/tests/nix_tests/eval-fail-*.nix")] +fn nix_eval_fail(code_path: &str) { + eval_test(code_path, false) +} diff --git a/tvix/eval/src/tests/nix_tests/README.md b/tvix/eval/src/tests/nix_tests/README.md new file mode 100644 index 000000000000..357f3547da18 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/README.md @@ -0,0 +1,8 @@ +These test definitions are taken from the Nix 2.3 code base, they can +be found upstream at: + + https://github.com/NixOS/nix/tree/2.3.16/tests/lang + +These tests follow the licensing directions of Nix 2.3 itself: + + https://github.com/NixOS/nix/blob/2.3.16/COPYING diff --git a/tvix/eval/src/tests/nix_tests/binary-data b/tvix/eval/src/tests/nix_tests/binary-data new file mode 100644 index 000000000000..06d740502001 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/binary-data Binary files differdiff --git a/tvix/eval/src/tests/nix_tests/data b/tvix/eval/src/tests/nix_tests/data new file mode 100644 index 000000000000..257cc5642cb1 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/data @@ -0,0 +1 @@ +foo diff --git a/tvix/eval/src/tests/nix_tests/dir1/a.nix b/tvix/eval/src/tests/nix_tests/dir1/a.nix new file mode 100644 index 000000000000..231f150c579c --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/dir1/a.nix @@ -0,0 +1 @@ +"a" diff --git a/tvix/eval/src/tests/nix_tests/dir2/a.nix b/tvix/eval/src/tests/nix_tests/dir2/a.nix new file mode 100644 index 000000000000..170df520ab68 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/dir2/a.nix @@ -0,0 +1 @@ +"X" diff --git a/tvix/eval/src/tests/nix_tests/dir2/b.nix b/tvix/eval/src/tests/nix_tests/dir2/b.nix new file mode 100644 index 000000000000..19010cc35ca6 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/dir2/b.nix @@ -0,0 +1 @@ +"b" diff --git a/tvix/eval/src/tests/nix_tests/dir3/a.nix b/tvix/eval/src/tests/nix_tests/dir3/a.nix new file mode 100644 index 000000000000..170df520ab68 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/dir3/a.nix @@ -0,0 +1 @@ +"X" diff --git a/tvix/eval/src/tests/nix_tests/dir3/b.nix b/tvix/eval/src/tests/nix_tests/dir3/b.nix new file mode 100644 index 000000000000..170df520ab68 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/dir3/b.nix @@ -0,0 +1 @@ +"X" diff --git a/tvix/eval/src/tests/nix_tests/dir3/c.nix b/tvix/eval/src/tests/nix_tests/dir3/c.nix new file mode 100644 index 000000000000..cdf158597eef --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/dir3/c.nix @@ -0,0 +1 @@ +"c" diff --git a/tvix/eval/src/tests/nix_tests/dir4/a.nix b/tvix/eval/src/tests/nix_tests/dir4/a.nix new file mode 100644 index 000000000000..170df520ab68 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/dir4/a.nix @@ -0,0 +1 @@ +"X" diff --git a/tvix/eval/src/tests/nix_tests/dir4/c.nix b/tvix/eval/src/tests/nix_tests/dir4/c.nix new file mode 100644 index 000000000000..170df520ab68 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/dir4/c.nix @@ -0,0 +1 @@ +"X" diff --git a/tvix/eval/src/tests/nix_tests/eval-fail-abort.nix b/tvix/eval/src/tests/nix_tests/eval-fail-abort.nix new file mode 100644 index 000000000000..75c51bceb540 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-fail-abort.nix @@ -0,0 +1 @@ +if true then abort "this should fail" else 1 diff --git a/tvix/eval/src/tests/nix_tests/eval-fail-assert.nix b/tvix/eval/src/tests/nix_tests/eval-fail-assert.nix new file mode 100644 index 000000000000..3b7a1e8bf0c2 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-fail-assert.nix @@ -0,0 +1,5 @@ +let { + x = arg: assert arg == "y"; 123; + + 
body = x "x"; +} \ No newline at end of file diff --git a/tvix/eval/src/tests/nix_tests/eval-fail-bad-antiquote-1.nix b/tvix/eval/src/tests/nix_tests/eval-fail-bad-antiquote-1.nix new file mode 100644 index 000000000000..ffe9c983c26b --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-fail-bad-antiquote-1.nix @@ -0,0 +1 @@ +"${x: x}" diff --git a/tvix/eval/src/tests/nix_tests/eval-fail-bad-antiquote-3.nix b/tvix/eval/src/tests/nix_tests/eval-fail-bad-antiquote-3.nix new file mode 100644 index 000000000000..65b9d4f505b1 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-fail-bad-antiquote-3.nix @@ -0,0 +1 @@ +''${x: x}'' diff --git a/tvix/eval/src/tests/nix_tests/eval-fail-blackhole.nix-disabled b/tvix/eval/src/tests/nix_tests/eval-fail-blackhole.nix-disabled new file mode 100644 index 000000000000..81133b511c95 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-fail-blackhole.nix-disabled @@ -0,0 +1,5 @@ +let { + body = x; + x = y; + y = x; +} diff --git a/tvix/eval/src/tests/nix_tests/eval-fail-deepseq.nix b/tvix/eval/src/tests/nix_tests/eval-fail-deepseq.nix new file mode 100644 index 000000000000..9baa49b063ec --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-fail-deepseq.nix @@ -0,0 +1 @@ +builtins.deepSeq { x = abort "foo"; } 456 diff --git a/tvix/eval/src/tests/nix_tests/eval-fail-division-by-zero-float.nix b/tvix/eval/src/tests/nix_tests/eval-fail-division-by-zero-float.nix new file mode 100644 index 000000000000..82dd6873218e --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-fail-division-by-zero-float.nix @@ -0,0 +1 @@ +1.0 / 0.0 diff --git a/tvix/eval/src/tests/nix_tests/eval-fail-division-by-zero-int.nix b/tvix/eval/src/tests/nix_tests/eval-fail-division-by-zero-int.nix new file mode 100644 index 000000000000..72dca4d5e478 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-fail-division-by-zero-int.nix @@ -0,0 +1 @@ +1 / 0 diff --git a/tvix/eval/src/tests/nix_tests/eval-fail-hashfile-missing.nix b/tvix/eval/src/tests/nix_tests/eval-fail-hashfile-missing.nix new file mode 100644 index 000000000000..ce098b82380a --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-fail-hashfile-missing.nix @@ -0,0 +1,5 @@ +let + paths = [ ./this-file-is-definitely-not-there-7392097 "/and/neither/is/this/37293620" ]; +in + toString (builtins.concatLists (map (hash: map (builtins.hashFile hash) paths) ["md5" "sha1" "sha256" "sha512"])) + diff --git a/tvix/eval/src/tests/nix_tests/eval-fail-missing-arg.nix b/tvix/eval/src/tests/nix_tests/eval-fail-missing-arg.nix new file mode 100644 index 000000000000..c4be9797c534 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-fail-missing-arg.nix @@ -0,0 +1 @@ +({x, y, z}: x + y + z) {x = "foo"; z = "bar";} diff --git a/tvix/eval/src/tests/nix_tests/eval-fail-path-slash.nix b/tvix/eval/src/tests/nix_tests/eval-fail-path-slash.nix new file mode 100644 index 000000000000..8c2e104c788f --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-fail-path-slash.nix @@ -0,0 +1,6 @@ +# Trailing slashes in paths are not allowed. +# This restriction could be lifted sometime, +# for example if we make '/' a path concatenation operator. 
+# See https://github.com/NixOS/nix/issues/1138 +# and https://nixos.org/nix-dev/2016-June/020829.html +/nix/store/ diff --git a/tvix/eval/src/tests/nix_tests/eval-fail-remove.nix b/tvix/eval/src/tests/nix_tests/eval-fail-remove.nix new file mode 100644 index 000000000000..539e0eb0a6f6 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-fail-remove.nix @@ -0,0 +1,5 @@ +let { + attrs = {x = 123; y = 456;}; + + body = (removeAttrs attrs ["x"]).x; +} \ No newline at end of file diff --git a/tvix/eval/src/tests/nix_tests/eval-fail-seq.nix b/tvix/eval/src/tests/nix_tests/eval-fail-seq.nix new file mode 100644 index 000000000000..cddbbfd3261e --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-fail-seq.nix @@ -0,0 +1 @@ +builtins.seq (abort "foo") 2 diff --git a/tvix/eval/src/tests/nix_tests/eval-fail-substring.nix b/tvix/eval/src/tests/nix_tests/eval-fail-substring.nix new file mode 100644 index 000000000000..f37c2bc0a160 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-fail-substring.nix @@ -0,0 +1 @@ +builtins.substring (builtins.sub 0 1) 1 "x" diff --git a/tvix/eval/src/tests/nix_tests/eval-fail-to-path.nix b/tvix/eval/src/tests/nix_tests/eval-fail-to-path.nix new file mode 100644 index 000000000000..5e322bc31369 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-fail-to-path.nix @@ -0,0 +1 @@ +builtins.toPath "foo/bar" diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-any-all.exp b/tvix/eval/src/tests/nix_tests/eval-okay-any-all.exp new file mode 100644 index 000000000000..eb273f45b2a6 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-any-all.exp @@ -0,0 +1 @@ +[ false false true true true true false true ] diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-any-all.nix b/tvix/eval/src/tests/nix_tests/eval-okay-any-all.nix new file mode 100644 index 000000000000..a3f26ea2aa83 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-any-all.nix @@ -0,0 +1,11 @@ +with builtins; + +[ (any (x: x == 1) []) + (any (x: x == 1) [2 3 4]) + (any (x: x == 1) [1 2 3 4]) + (any (x: x == 1) [4 3 2 1]) + (all (x: x == 1) []) + (all (x: x == 1) [1]) + (all (x: x == 1) [1 2 3]) + (all (x: x == 1) [1 1 1]) +] diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-arithmetic.exp b/tvix/eval/src/tests/nix_tests/eval-okay-arithmetic.exp new file mode 100644 index 000000000000..5c54d10b7b47 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-arithmetic.exp @@ -0,0 +1 @@ +2216 diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-arithmetic.nix b/tvix/eval/src/tests/nix_tests/eval-okay-arithmetic.nix new file mode 100644 index 000000000000..7e9e6a0b666e --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-arithmetic.nix @@ -0,0 +1,59 @@ +with import ./lib.nix; + +let { + + /* Supposedly tail recursive version: + + range_ = accum: first: last: + if first == last then ([first] ++ accum) + else range_ ([first] ++ accum) (builtins.add first 1) last; + + range = range_ []; + */ + + x = 12; + + err = abort "urgh"; + + body = sum + [ (sum (range 1 50)) + (123 + 456) + (0 + -10 + -(-11) + -x) + (10 - 7 - -2) + (10 - (6 - -1)) + (10 - 1 + 2) + (3 * 4 * 5) + (56088 / 123 / 2) + (3 + 4 * const 5 0 - 6 / id 2) + + (builtins.bitAnd 12 10) # 0b1100 & 0b1010 = 8 + (builtins.bitOr 12 10) # 0b1100 | 0b1010 = 14 + (builtins.bitXor 12 10) # 0b1100 ^ 0b1010 = 6 + + (if 3 < 7 then 1 else err) + (if 7 < 3 then err else 1) + (if 3 < 3 then err else 1) + + (if 3 <= 7 then 1 else err) + (if 7 <= 3 then err else 1) + (if 3 <= 3 then 1 else err) + + (if 3 > 7 then err else 1) + (if 7 > 3 then 1 
else err) + (if 3 > 3 then err else 1) + + (if 3 >= 7 then err else 1) + (if 7 >= 3 then 1 else err) + (if 3 >= 3 then 1 else err) + + (if 2 > 1 == 1 < 2 then 1 else err) + (if 1 + 2 * 3 >= 7 then 1 else err) + (if 1 + 2 * 3 < 7 then err else 1) + + # Not integer, but so what. + (if "aa" < "ab" then 1 else err) + (if "aa" < "aa" then err else 1) + (if "foo" < "foobar" then 1 else err) + ]; + +} diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-attrnames.exp b/tvix/eval/src/tests/nix_tests/eval-okay-attrnames.exp new file mode 100644 index 000000000000..b4aa387e07b8 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-attrnames.exp @@ -0,0 +1 @@ +"newxfoonewxy" diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-attrnames.nix b/tvix/eval/src/tests/nix_tests/eval-okay-attrnames.nix new file mode 100644 index 000000000000..e5b26e9f2e39 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-attrnames.nix @@ -0,0 +1,11 @@ +with import ./lib.nix; + +let + + attrs = {y = "y"; x = "x"; foo = "foo";} // rec {x = "newx"; bar = x;}; + + names = builtins.attrNames attrs; + + values = map (name: builtins.getAttr name attrs) names; + +in assert values == builtins.attrValues attrs; concat values diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-attrs.exp b/tvix/eval/src/tests/nix_tests/eval-okay-attrs.exp new file mode 100644 index 000000000000..45b0f829eb33 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-attrs.exp @@ -0,0 +1 @@ +987 diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-attrs.nix b/tvix/eval/src/tests/nix_tests/eval-okay-attrs.nix new file mode 100644 index 000000000000..810b31a5da96 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-attrs.nix @@ -0,0 +1,5 @@ +let { + as = { x = 123; y = 456; } // { z = 789; } // { z = 987; }; + + body = if as ? a then as.a else assert as ? 
z; as.z; +} diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-attrs2.exp b/tvix/eval/src/tests/nix_tests/eval-okay-attrs2.exp new file mode 100644 index 000000000000..45b0f829eb33 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-attrs2.exp @@ -0,0 +1 @@ +987 diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-attrs2.nix b/tvix/eval/src/tests/nix_tests/eval-okay-attrs2.nix new file mode 100644 index 000000000000..9e06b83ac1fd --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-attrs2.nix @@ -0,0 +1,10 @@ +let { + as = { x = 123; y = 456; } // { z = 789; } // { z = 987; }; + + A = "a"; + Z = "z"; + + body = if builtins.hasAttr A as + then builtins.getAttr A as + else assert builtins.hasAttr Z as; builtins.getAttr Z as; +} diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-attrs3.exp b/tvix/eval/src/tests/nix_tests/eval-okay-attrs3.exp new file mode 100644 index 000000000000..19de4fdf79f7 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-attrs3.exp @@ -0,0 +1 @@ +"foo 22 80 itchyxac" diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-attrs3.nix b/tvix/eval/src/tests/nix_tests/eval-okay-attrs3.nix new file mode 100644 index 000000000000..f29de11fe660 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-attrs3.nix @@ -0,0 +1,22 @@ +let + + config = + { + services.sshd.enable = true; + services.sshd.port = 22; + services.httpd.port = 80; + hostName = "itchy"; + a.b.c.d.e.f.g.h.i.j.k.l.m.n.o.p.q.r.s.t.u.v.w.x.y.z = "x"; + foo = { + a = "a"; + b.c = "c"; + }; + }; + +in + if config.services.sshd.enable + then "foo ${toString config.services.sshd.port} ${toString config.services.httpd.port} ${config.hostName}" + + "${config.a.b.c.d.e.f.g.h.i.j.k.l.m.n.o.p.q.r.s.t.u.v.w.x.y.z}" + + "${config.foo.a}" + + "${config.foo.b.c}" + else "bar" diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-attrs4.exp b/tvix/eval/src/tests/nix_tests/eval-okay-attrs4.exp new file mode 100644 index 000000000000..1851731442d3 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-attrs4.exp @@ -0,0 +1 @@ +[ true false true false false true false false ] diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-attrs4.nix b/tvix/eval/src/tests/nix_tests/eval-okay-attrs4.nix new file mode 100644 index 000000000000..43ec81210f38 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-attrs4.nix @@ -0,0 +1,7 @@ +let + + as = { x.y.z = 123; a.b.c = 456; }; + + bs = null; + +in [ (as ? x) (as ? y) (as ? x.y.z) (as ? x.y.z.a) (as ? x.y.a) (as ? a.b.c) (bs ? x) (bs ? x.y.z) ] diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-attrs5.exp b/tvix/eval/src/tests/nix_tests/eval-okay-attrs5.exp new file mode 100644 index 000000000000..ce0430d78081 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-attrs5.exp @@ -0,0 +1 @@ +[ 123 "foo" 456 456 "foo" "xyzzy" "xyzzy" true ] diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-attrs5.nix b/tvix/eval/src/tests/nix_tests/eval-okay-attrs5.nix new file mode 100644 index 000000000000..a4584cd3b398 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-attrs5.nix @@ -0,0 +1,21 @@ +with import ./lib.nix; + +let + + as = { x.y.z = 123; a.b.c = 456; }; + + bs = { f-o-o.bar = "foo"; }; + + or = x: y: x || y; + +in + [ as.x.y.z + as.foo or "foo" + as.x.y.bla or as.a.b.c + as.a.b.c or as.x.y.z + as.x.y.bla or bs.f-o-o.bar or "xyzzy" + as.x.y.bla or bs.bar.foo or "xyzzy" + (123).bla or null.foo or "xyzzy" + # Backwards compatibility test. 
+ (fold or [] [true false false]) + ] diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-backslash-newline-1.exp b/tvix/eval/src/tests/nix_tests/eval-okay-backslash-newline-1.exp new file mode 100644 index 000000000000..3e754364cc9c --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-backslash-newline-1.exp @@ -0,0 +1 @@ +"a\nb" diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-backslash-newline-1.nix b/tvix/eval/src/tests/nix_tests/eval-okay-backslash-newline-1.nix new file mode 100644 index 000000000000..7fef3dddd4dd --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-backslash-newline-1.nix @@ -0,0 +1,2 @@ +"a\ +b" diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-backslash-newline-2.exp b/tvix/eval/src/tests/nix_tests/eval-okay-backslash-newline-2.exp new file mode 100644 index 000000000000..3e754364cc9c --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-backslash-newline-2.exp @@ -0,0 +1 @@ +"a\nb" diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-backslash-newline-2.nix b/tvix/eval/src/tests/nix_tests/eval-okay-backslash-newline-2.nix new file mode 100644 index 000000000000..35ddf495c63b --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-backslash-newline-2.nix @@ -0,0 +1,2 @@ +''a''\ +b'' diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-builtins-add.exp b/tvix/eval/src/tests/nix_tests/eval-okay-builtins-add.exp new file mode 100644 index 000000000000..0350b518a7ec --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-builtins-add.exp @@ -0,0 +1 @@ +[ 5 4 "int" "tt" "float" 4 ] diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-builtins-add.nix b/tvix/eval/src/tests/nix_tests/eval-okay-builtins-add.nix new file mode 100644 index 000000000000..c841816222a5 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-builtins-add.nix @@ -0,0 +1,8 @@ +[ +(builtins.add 2 3) +(builtins.add 2 2) +(builtins.typeOf (builtins.add 2 2)) +("t" + "t") +(builtins.typeOf (builtins.add 2.0 2)) +(builtins.add 2.0 2) +] diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-builtins.exp b/tvix/eval/src/tests/nix_tests/eval-okay-builtins.exp new file mode 100644 index 000000000000..0661686d611d --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-builtins.exp @@ -0,0 +1 @@ +/foo diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-builtins.nix b/tvix/eval/src/tests/nix_tests/eval-okay-builtins.nix new file mode 100644 index 000000000000..e9d65e88a817 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-builtins.nix @@ -0,0 +1,12 @@ +assert builtins ? currentSystem; +assert !builtins ? __currentSystem; + +let { + + x = if builtins ? dirOf then builtins.dirOf /foo/bar else ""; + + y = if builtins ? 
fnord then builtins.fnord "foo" else ""; + + body = x + y; + +} diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-callable-attrs.exp b/tvix/eval/src/tests/nix_tests/eval-okay-callable-attrs.exp new file mode 100644 index 000000000000..27ba77ddaf61 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-callable-attrs.exp @@ -0,0 +1 @@ +true diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-callable-attrs.nix b/tvix/eval/src/tests/nix_tests/eval-okay-callable-attrs.nix new file mode 100644 index 000000000000..310a030df004 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-callable-attrs.nix @@ -0,0 +1 @@ +({ __functor = self: x: self.foo && x; foo = false; } // { foo = true; }) true diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-catattrs.exp b/tvix/eval/src/tests/nix_tests/eval-okay-catattrs.exp new file mode 100644 index 000000000000..b4a1e66d6b8a --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-catattrs.exp @@ -0,0 +1 @@ +[ 1 2 ] diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-catattrs.nix b/tvix/eval/src/tests/nix_tests/eval-okay-catattrs.nix new file mode 100644 index 000000000000..2c3dc10da528 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-catattrs.nix @@ -0,0 +1 @@ +builtins.catAttrs "a" [ { a = 1; } { b = 0; } { a = 2; } ] diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-closure.exp b/tvix/eval/src/tests/nix_tests/eval-okay-closure.exp new file mode 100644 index 000000000000..e7dbf978160d --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-closure.exp @@ -0,0 +1 @@ +[ { foo = true; key = -13; } { foo = true; key = -12; } { foo = true; key = -11; } { foo = true; key = -9; } { foo = true; key = -8; } { foo = true; key = -7; } { foo = true; key = -5; } { foo = true; key = -4; } { foo = true; key = -3; } { key = -1; } { foo = true; key = 0; } { foo = true; key = 1; } { foo = true; key = 2; } { foo = true; key = 4; } { foo = true; key = 5; } { foo = true; key = 6; } { key = 8; } { foo = true; key = 9; } { foo = true; key = 10; } { foo = true; key = 13; } { foo = true; key = 14; } { foo = true; key = 15; } { key = 17; } { foo = true; key = 18; } { foo = true; key = 19; } { foo = true; key = 22; } { foo = true; key = 23; } { key = 26; } { foo = true; key = 27; } { foo = true; key = 28; } { foo = true; key = 31; } { foo = true; key = 32; } { key = 35; } { foo = true; key = 36; } { foo = true; key = 40; } { foo = true; key = 41; } { key = 44; } { foo = true; key = 45; } { foo = true; key = 49; } { key = 53; } { foo = true; key = 54; } { foo = true; key = 58; } { key = 62; } { foo = true; key = 67; } { key = 71; } { key = 80; } ] diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-closure.nix b/tvix/eval/src/tests/nix_tests/eval-okay-closure.nix new file mode 100644 index 000000000000..cccd4dc35730 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-closure.nix @@ -0,0 +1,13 @@ +let + + closure = builtins.genericClosure { + startSet = [{key = 80;}]; + operator = {key, foo ? 
false}: + if builtins.lessThan key 0 + then [] + else [{key = builtins.sub key 9;} {key = builtins.sub key 13; foo = true;}]; + }; + + sort = (import ./lib.nix).sortBy (a: b: builtins.lessThan a.key b.key); + +in sort closure diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-comments.exp b/tvix/eval/src/tests/nix_tests/eval-okay-comments.exp new file mode 100644 index 000000000000..7182dc2f9b8e --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-comments.exp @@ -0,0 +1 @@ +"abcdefghijklmnopqrstuvwxyz" diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-comments.nix b/tvix/eval/src/tests/nix_tests/eval-okay-comments.nix new file mode 100644 index 000000000000..cb2cce218029 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-comments.nix @@ -0,0 +1,59 @@ +# A simple comment +"a"+ # And another +## A double comment +"b"+ ## And another +# Nested # comments # +"c"+ # and # some # other # +# An empty line, following here: + +"d"+ # and a comment not starting the line ! + +"e"+ +/* multiline comments */ +"f" + +/* multiline + comments, + on + multiple + lines +*/ +"g" + +# Small, tricky comments +/**/ "h"+ /*/*/ "i"+ /***/ "j"+ /* /*/ "k"+ /*/* /*/ "l"+ +# Comments with an even number of ending '*' used to fail: +"m"+ +/* */ /* **/ /* ***/ /* ****/ "n"+ +/* */ /** */ /*** */ /**** */ "o"+ +/** **/ /*** ***/ /**** ****/ "p"+ +/* * ** *** **** ***** */ "q"+ +# Random comments +/* ***** ////// * / * / /* */ "r"+ +# Mixed comments +/* # */ +"s"+ +# /* # +"t"+ +# /* # */ +"u"+ +# /*********/ +"v"+ +## */* +"w"+ +/* + * Multiline, decorated comments + * # This ain't a nest'd comm'nt + */ +"x"+ +''${/** with **/"y" + # real + /* comments + inside ! # */ + + # (and empty lines) + +}''+ /* And a multiline comment, + on the same line, + after some spaces +*/ # followed by a one-line comment +"z" +/* EOF */ diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-concat.exp b/tvix/eval/src/tests/nix_tests/eval-okay-concat.exp new file mode 100644 index 000000000000..bb4bbd577410 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-concat.exp @@ -0,0 +1 @@ +[ 1 2 3 4 5 6 7 8 9 ] diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-concat.nix b/tvix/eval/src/tests/nix_tests/eval-okay-concat.nix new file mode 100644 index 000000000000..d158a9bf05b9 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-concat.nix @@ -0,0 +1 @@ +[1 2 3] ++ [4 5 6] ++ [7 8 9] diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-concatmap.exp b/tvix/eval/src/tests/nix_tests/eval-okay-concatmap.exp new file mode 100644 index 000000000000..3b8be7739deb --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-concatmap.exp @@ -0,0 +1 @@ +[ [ 1 3 5 7 9 ] [ "a" "z" "b" "z" ] ] diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-concatmap.nix b/tvix/eval/src/tests/nix_tests/eval-okay-concatmap.nix new file mode 100644 index 000000000000..97da5d37a412 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-concatmap.nix @@ -0,0 +1,5 @@ +with import ./lib.nix; + +[ (builtins.concatMap (x: if x / 2 * 2 == x then [] else [ x ]) (range 0 10)) + (builtins.concatMap (x: [x] ++ ["z"]) ["a" "b"]) +] diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-concatstringssep.exp b/tvix/eval/src/tests/nix_tests/eval-okay-concatstringssep.exp new file mode 100644 index 000000000000..93987647ffe6 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-concatstringssep.exp @@ -0,0 +1 @@ +[ "" "foobarxyzzy" "foo, bar, xyzzy" "foo" "" ] diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-concatstringssep.nix 
b/tvix/eval/src/tests/nix_tests/eval-okay-concatstringssep.nix new file mode 100644 index 000000000000..adc4c41bd551 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-concatstringssep.nix @@ -0,0 +1,8 @@ +with builtins; + +[ (concatStringsSep "" []) + (concatStringsSep "" ["foo" "bar" "xyzzy"]) + (concatStringsSep ", " ["foo" "bar" "xyzzy"]) + (concatStringsSep ", " ["foo"]) + (concatStringsSep ", " []) +] diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-deepseq.exp b/tvix/eval/src/tests/nix_tests/eval-okay-deepseq.exp new file mode 100644 index 000000000000..8d38505c1686 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-deepseq.exp @@ -0,0 +1 @@ +456 diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-deepseq.nix b/tvix/eval/src/tests/nix_tests/eval-okay-deepseq.nix new file mode 100644 index 000000000000..53aa4b1dc251 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-deepseq.nix @@ -0,0 +1 @@ +builtins.deepSeq (let as = { x = 123; y = as; }; in as) 456 diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-delayed-with-inherit.exp b/tvix/eval/src/tests/nix_tests/eval-okay-delayed-with-inherit.exp new file mode 100644 index 000000000000..eaacb55c1aff --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-delayed-with-inherit.exp @@ -0,0 +1 @@ +"b-overridden" diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-delayed-with-inherit.nix b/tvix/eval/src/tests/nix_tests/eval-okay-delayed-with-inherit.nix new file mode 100644 index 000000000000..84b388c27130 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-delayed-with-inherit.nix @@ -0,0 +1,24 @@ +let + pkgs_ = with pkgs; { + a = derivation { + name = "a"; + system = builtins.currentSystem; + builder = "/bin/sh"; + args = [ "-c" "touch $out" ]; + inherit b; + }; + + inherit b; + }; + + packageOverrides = p: { + b = derivation { + name = "b-overridden"; + system = builtins.currentSystem; + builder = "/bin/sh"; + args = [ "-c" "touch $out" ]; + }; + }; + + pkgs = pkgs_ // (packageOverrides pkgs_); +in pkgs.a.b.name diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-dynamic-attrs-2.exp b/tvix/eval/src/tests/nix_tests/eval-okay-dynamic-attrs-2.exp new file mode 100644 index 000000000000..27ba77ddaf61 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-dynamic-attrs-2.exp @@ -0,0 +1 @@ +true diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-dynamic-attrs-2.nix b/tvix/eval/src/tests/nix_tests/eval-okay-dynamic-attrs-2.nix new file mode 100644 index 000000000000..6d57bf854908 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-dynamic-attrs-2.nix @@ -0,0 +1 @@ +{ a."${"b"}" = true; a."${"c"}" = false; }.a.b diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-dynamic-attrs-bare.exp b/tvix/eval/src/tests/nix_tests/eval-okay-dynamic-attrs-bare.exp new file mode 100644 index 000000000000..df8750afc036 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-dynamic-attrs-bare.exp @@ -0,0 +1 @@ +{ binds = true; hasAttrs = true; multiAttrs = true; recBinds = true; selectAttrs = true; selectOrAttrs = true; } diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-dynamic-attrs-bare.nix b/tvix/eval/src/tests/nix_tests/eval-okay-dynamic-attrs-bare.nix new file mode 100644 index 000000000000..0dbe15e6384c --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-dynamic-attrs-bare.nix @@ -0,0 +1,17 @@ +let + aString = "a"; + + bString = "b"; +in { + hasAttrs = { a.b = null; } ? 
${aString}.b; + + selectAttrs = { a.b = true; }.a.${bString}; + + selectOrAttrs = { }.${aString} or true; + + binds = { ${aString}."${bString}c" = true; }.a.bc; + + recBinds = rec { ${bString} = a; a = true; }.b; + + multiAttrs = { ${aString} = true; ${bString} = false; }.a; +} diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-dynamic-attrs.exp b/tvix/eval/src/tests/nix_tests/eval-okay-dynamic-attrs.exp new file mode 100644 index 000000000000..df8750afc036 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-dynamic-attrs.exp @@ -0,0 +1 @@ +{ binds = true; hasAttrs = true; multiAttrs = true; recBinds = true; selectAttrs = true; selectOrAttrs = true; } diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-dynamic-attrs.nix b/tvix/eval/src/tests/nix_tests/eval-okay-dynamic-attrs.nix new file mode 100644 index 000000000000..ee02ac7e6579 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-dynamic-attrs.nix @@ -0,0 +1,17 @@ +let + aString = "a"; + + bString = "b"; +in { + hasAttrs = { a.b = null; } ? "${aString}".b; + + selectAttrs = { a.b = true; }.a."${bString}"; + + selectOrAttrs = { }."${aString}" or true; + + binds = { "${aString}"."${bString}c" = true; }.a.bc; + + recBinds = rec { "${bString}" = a; a = true; }.b; + + multiAttrs = { "${aString}" = true; "${bString}" = false; }.a; +} diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-elem.exp b/tvix/eval/src/tests/nix_tests/eval-okay-elem.exp new file mode 100644 index 000000000000..3cf6c0e962f0 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-elem.exp @@ -0,0 +1 @@ +[ true false 30 ] diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-elem.nix b/tvix/eval/src/tests/nix_tests/eval-okay-elem.nix new file mode 100644 index 000000000000..71ea7a4ed03d --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-elem.nix @@ -0,0 +1,6 @@ +with import ./lib.nix; + +let xs = range 10 40; in + +[ (builtins.elem 23 xs) (builtins.elem 42 xs) (builtins.elemAt xs 20) ] + diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-empty-args.exp b/tvix/eval/src/tests/nix_tests/eval-okay-empty-args.exp new file mode 100644 index 000000000000..cb5537d5d7ce --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-empty-args.exp @@ -0,0 +1 @@ +"ab" diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-empty-args.nix b/tvix/eval/src/tests/nix_tests/eval-okay-empty-args.nix new file mode 100644 index 000000000000..78c133afdd94 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-empty-args.nix @@ -0,0 +1 @@ +({}: {x,y,}: "${x}${y}") {} {x = "a"; y = "b";} diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-eq.exp b/tvix/eval/src/tests/nix_tests/eval-okay-eq.exp new file mode 100644 index 000000000000..27ba77ddaf61 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-eq.exp @@ -0,0 +1 @@ +true diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-eq.nix b/tvix/eval/src/tests/nix_tests/eval-okay-eq.nix new file mode 100644 index 000000000000..73d200b38141 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-eq.nix @@ -0,0 +1,3 @@ +["foobar" (rec {x = 1; y = x;})] +== +[("foo" + "bar") ({x = 1; y = 1;})] diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-filter.exp b/tvix/eval/src/tests/nix_tests/eval-okay-filter.exp new file mode 100644 index 000000000000..355d51c27d8f --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-filter.exp @@ -0,0 +1 @@ +[ 0 2 4 6 8 10 100 102 104 106 108 110 ] diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-filter.nix b/tvix/eval/src/tests/nix_tests/eval-okay-filter.nix new 
file mode 100644 index 000000000000..85109b0d0eb8 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-filter.nix @@ -0,0 +1,5 @@ +with import ./lib.nix; + +builtins.filter + (x: x / 2 * 2 == x) + (builtins.concatLists [ (range 0 10) (range 100 110) ]) diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-flatten.exp b/tvix/eval/src/tests/nix_tests/eval-okay-flatten.exp new file mode 100644 index 000000000000..b979b2b8b9bc --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-flatten.exp @@ -0,0 +1 @@ +"1234567" diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-flatten.nix b/tvix/eval/src/tests/nix_tests/eval-okay-flatten.nix new file mode 100644 index 000000000000..fe911e9683e2 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-flatten.nix @@ -0,0 +1,8 @@ +with import ./lib.nix; + +let { + + l = ["1" "2" ["3" ["4"] ["5" "6"]] "7"]; + + body = concat (flatten l); +} diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-float.exp b/tvix/eval/src/tests/nix_tests/eval-okay-float.exp new file mode 100644 index 000000000000..3c50a8adce86 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-float.exp @@ -0,0 +1 @@ +[ 3.4 3.5 2.5 1.5 ] diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-float.nix b/tvix/eval/src/tests/nix_tests/eval-okay-float.nix new file mode 100644 index 000000000000..b2702c7b1668 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-float.nix @@ -0,0 +1,6 @@ +[ + (1.1 + 2.3) + (builtins.add (0.5 + 0.5) (2.0 + 0.5)) + ((0.5 + 0.5) * (2.0 + 0.5)) + ((1.5 + 1.5) / (0.5 * 4.0)) +] diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-foldlStrict.exp b/tvix/eval/src/tests/nix_tests/eval-okay-foldlStrict.exp new file mode 100644 index 000000000000..837e12b406f6 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-foldlStrict.exp @@ -0,0 +1 @@ +500500 diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-foldlStrict.nix b/tvix/eval/src/tests/nix_tests/eval-okay-foldlStrict.nix new file mode 100644 index 000000000000..3b87188d243b --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-foldlStrict.nix @@ -0,0 +1,3 @@ +with import ./lib.nix; + +builtins.foldl' (x: y: x + y) 0 (range 1 1000) diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-fromTOML.exp b/tvix/eval/src/tests/nix_tests/eval-okay-fromTOML.exp new file mode 100644 index 000000000000..d0dd3af2c814 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-fromTOML.exp @@ -0,0 +1 @@ +[ { clients = { data = [ [ "gamma" "delta" ] [ 1 2 ] ]; hosts = [ "alpha" "omega" ]; }; database = { connection_max = 5000; enabled = true; ports = [ 8001 8001 8002 ]; server = "192.168.1.1"; }; owner = { name = "Tom Preston-Werner"; }; servers = { alpha = { dc = "eqdc10"; ip = "10.0.0.1"; }; beta = { dc = "eqdc10"; ip = "10.0.0.2"; }; }; title = "TOML Example"; } { "1234" = "value"; "127.0.0.1" = "value"; a = { b = { c = { }; }; }; arr1 = [ 1 2 3 ]; arr2 = [ "red" "yellow" "green" ]; arr3 = [ [ 1 2 ] [ 3 4 5 ] ]; arr4 = [ "all" "strings" "are the same" "type" ]; arr5 = [ [ 1 2 ] [ "a" "b" "c" ] ]; arr7 = [ 1 2 3 ]; arr8 = [ 1 2 ]; bare-key = "value"; bare_key = "value"; bin1 = 214; bool1 = true; bool2 = false; "character encoding" = "value"; d = { e = { f = { }; }; }; dog = { "tater.man" = { type = { name = "pug"; }; }; }; flt1 = 1; flt2 = 3.1415; flt3 = -0.01; flt4 = 5e+22; flt5 = 1e+06; flt6 = -0.02; flt7 = 6.626e-34; flt8 = 9.22462e+06; fruit = [ { name = "apple"; physical = { color = "red"; shape = "round"; }; variety = [ { name = "red delicious"; } { name = "granny smith"; } ]; } { name = 
"banana"; variety = [ { name = "plantain"; } ]; } ]; g = { h = { i = { }; }; }; hex1 = 3735928559; hex2 = 3735928559; hex3 = 3735928559; int1 = 99; int2 = 42; int3 = 0; int4 = -17; int5 = 1000; int6 = 5349221; int7 = 12345; j = { "ʞ" = { l = { }; }; }; key = "value"; key2 = "value"; name = "Orange"; oct1 = 342391; oct2 = 493; physical = { color = "orange"; shape = "round"; }; products = [ { name = "Hammer"; sku = 738594937; } { } { color = "gray"; name = "Nail"; sku = 284758393; } ]; "quoted \"value\"" = "value"; site = { "google.com" = true; }; str = "I'm a string. \"You can quote me\". Name\tJosé\nLocation\tSF."; table-1 = { key1 = "some string"; key2 = 123; }; table-2 = { key1 = "another string"; key2 = 456; }; x = { y = { z = { w = { animal = { type = { name = "pug"; }; }; name = { first = "Tom"; last = "Preston-Werner"; }; point = { x = 1; y = 2; }; }; }; }; }; "ʎǝʞ" = "value"; } { metadata = { "checksum aho-corasick 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)" = "d6531d44de723825aa81398a6415283229725a00fa30713812ab9323faa82fc4"; "checksum ansi_term 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ee49baf6cb617b853aa8d93bf420db2383fab46d314482ca2803b40d5fde979b"; "checksum ansi_term 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "23ac7c30002a5accbf7e8987d0632fa6de155b7c3d39d0067317a391e00a2ef6"; "checksum arrayvec 0.4.7 (registry+https://github.com/rust-lang/crates.io-index)" = "a1e964f9e24d588183fcb43503abda40d288c8657dfc27311516ce2f05675aef"; }; package = [ { dependencies = [ "memchr 2.0.1 (registry+https://github.com/rust-lang/crates.io-index)" ]; name = "aho-corasick"; source = "registry+https://github.com/rust-lang/crates.io-index"; version = "0.6.4"; } { name = "ansi_term"; source = "registry+https://github.com/rust-lang/crates.io-index"; version = "0.9.0"; } { dependencies = [ "libc 0.2.42 (registry+https://github.com/rust-lang/crates.io-index)" "termion 1.5.1 (registry+https://github.com/rust-lang/crates.io-index)" "winapi 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" ]; name = "atty"; source = "registry+https://github.com/rust-lang/crates.io-index"; version = "0.2.10"; } ]; } { a = [ [ { b = true; } ] ]; c = [ [ { d = true; } ] ]; e = [ [ 123 ] ]; } ] diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-fromTOML.nix b/tvix/eval/src/tests/nix_tests/eval-okay-fromTOML.nix new file mode 100644 index 000000000000..963932689942 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-fromTOML.nix @@ -0,0 +1,208 @@ +[ + + (builtins.fromTOML '' + # This is a TOML document. 
+ + title = "TOML Example" + + [owner] + name = "Tom Preston-Werner" + #dob = 1979-05-27T07:32:00-08:00 # First class dates + + [database] + server = "192.168.1.1" + ports = [ 8001, 8001, 8002 ] + connection_max = 5000 + enabled = true + + [servers] + + # Indentation (tabs and/or spaces) is allowed but not required + [servers.alpha] + ip = "10.0.0.1" + dc = "eqdc10" + + [servers.beta] + ip = "10.0.0.2" + dc = "eqdc10" + + [clients] + data = [ ["gamma", "delta"], [1, 2] ] + + # Line breaks are OK when inside arrays + hosts = [ + "alpha", + "omega" + ] + '') + + (builtins.fromTOML '' + key = "value" + bare_key = "value" + bare-key = "value" + 1234 = "value" + + "127.0.0.1" = "value" + "character encoding" = "value" + "ʎǝʞ" = "value" + 'key2' = "value" + 'quoted "value"' = "value" + + name = "Orange" + + physical.color = "orange" + physical.shape = "round" + site."google.com" = true + + # This is legal according to the spec, but cpptoml doesn't handle it. + #a.b.c = 1 + #a.d = 2 + + str = "I'm a string. \"You can quote me\". Name\tJos\u00E9\nLocation\tSF." + + int1 = +99 + int2 = 42 + int3 = 0 + int4 = -17 + int5 = 1_000 + int6 = 5_349_221 + int7 = 1_2_3_4_5 + + hex1 = 0xDEADBEEF + hex2 = 0xdeadbeef + hex3 = 0xdead_beef + + oct1 = 0o01234567 + oct2 = 0o755 + + bin1 = 0b11010110 + + flt1 = +1.0 + flt2 = 3.1415 + flt3 = -0.01 + flt4 = 5e+22 + flt5 = 1e6 + flt6 = -2E-2 + flt7 = 6.626e-34 + flt8 = 9_224_617.445_991_228_313 + + bool1 = true + bool2 = false + + # FIXME: not supported because Nix doesn't have a date/time type. + #odt1 = 1979-05-27T07:32:00Z + #odt2 = 1979-05-27T00:32:00-07:00 + #odt3 = 1979-05-27T00:32:00.999999-07:00 + #odt4 = 1979-05-27 07:32:00Z + #ldt1 = 1979-05-27T07:32:00 + #ldt2 = 1979-05-27T00:32:00.999999 + #ld1 = 1979-05-27 + #lt1 = 07:32:00 + #lt2 = 00:32:00.999999 + + arr1 = [ 1, 2, 3 ] + arr2 = [ "red", "yellow", "green" ] + arr3 = [ [ 1, 2 ], [3, 4, 5] ] + arr4 = [ "all", 'strings', """are the same""", ''''type''''] + arr5 = [ [ 1, 2 ], ["a", "b", "c"] ] + + arr7 = [ + 1, 2, 3 + ] + + arr8 = [ + 1, + 2, # this is ok + ] + + [table-1] + key1 = "some string" + key2 = 123 + + + [table-2] + key1 = "another string" + key2 = 456 + + [dog."tater.man"] + type.name = "pug" + + [a.b.c] + [ d.e.f ] + [ g . h . i ] + [ j . "ʞ" . 
'l' ] + [x.y.z.w] + + name = { first = "Tom", last = "Preston-Werner" } + point = { x = 1, y = 2 } + animal = { type.name = "pug" } + + [[products]] + name = "Hammer" + sku = 738594937 + + [[products]] + + [[products]] + name = "Nail" + sku = 284758393 + color = "gray" + + [[fruit]] + name = "apple" + + [fruit.physical] + color = "red" + shape = "round" + + [[fruit.variety]] + name = "red delicious" + + [[fruit.variety]] + name = "granny smith" + + [[fruit]] + name = "banana" + + [[fruit.variety]] + name = "plantain" + '') + + (builtins.fromTOML '' + [[package]] + name = "aho-corasick" + version = "0.6.4" + source = "registry+https://github.com/rust-lang/crates.io-index" + dependencies = [ + "memchr 2.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + ] + + [[package]] + name = "ansi_term" + version = "0.9.0" + source = "registry+https://github.com/rust-lang/crates.io-index" + + [[package]] + name = "atty" + version = "0.2.10" + source = "registry+https://github.com/rust-lang/crates.io-index" + dependencies = [ + "libc 0.2.42 (registry+https://github.com/rust-lang/crates.io-index)", + "termion 1.5.1 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", + ] + + [metadata] + "checksum aho-corasick 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)" = "d6531d44de723825aa81398a6415283229725a00fa30713812ab9323faa82fc4" + "checksum ansi_term 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ee49baf6cb617b853aa8d93bf420db2383fab46d314482ca2803b40d5fde979b" + "checksum ansi_term 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "23ac7c30002a5accbf7e8987d0632fa6de155b7c3d39d0067317a391e00a2ef6" + "checksum arrayvec 0.4.7 (registry+https://github.com/rust-lang/crates.io-index)" = "a1e964f9e24d588183fcb43503abda40d288c8657dfc27311516ce2f05675aef" + '') + + (builtins.fromTOML '' + a = [[{ b = true }]] + c = [ [ { d = true } ] ] + e = [[123]] + '') + +] diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-fromjson-escapes.exp b/tvix/eval/src/tests/nix_tests/eval-okay-fromjson-escapes.exp new file mode 100644 index 000000000000..add5505a8287 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-fromjson-escapes.exp @@ -0,0 +1 @@ +"quote \" reverse solidus \\ solidus / backspace formfeed newline \n carriage return \r horizontal tab \t 1 char unicode encoded backspace 1 char unicode encoded e with accent é 2 char unicode encoded s with caron š 3 char unicode encoded rightwards arrow →" diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-fromjson-escapes.nix b/tvix/eval/src/tests/nix_tests/eval-okay-fromjson-escapes.nix new file mode 100644 index 000000000000..f00713507732 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-fromjson-escapes.nix @@ -0,0 +1,3 @@ +# This string contains all supported escapes in a JSON string, per json.org +# \b and \f are not supported by Nix +builtins.fromJSON ''"quote \" reverse solidus \\ solidus \/ backspace \b formfeed \f newline \n carriage return \r horizontal tab \t 1 char unicode encoded backspace \u0008 1 char unicode encoded e with accent \u00e9 2 char unicode encoded s with caron \u0161 3 char unicode encoded rightwards arrow \u2192"'' diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-fromjson.exp b/tvix/eval/src/tests/nix_tests/eval-okay-fromjson.exp new file mode 100644 index 000000000000..27ba77ddaf61 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-fromjson.exp @@ -0,0 +1 @@ +true diff --git 
a/tvix/eval/src/tests/nix_tests/eval-okay-fromjson.nix b/tvix/eval/src/tests/nix_tests/eval-okay-fromjson.nix new file mode 100644 index 000000000000..e1c0f86cc4e4 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-fromjson.nix @@ -0,0 +1,35 @@ +builtins.fromJSON + '' + { + "Video": { + "Title": "The Penguin Chronicles", + "Width": 1920, + "Height": 1080, + "EmbeddedData": [3.14159, 23493,null, true ,false, -10], + "Thumb": { + "Url": "http://www.example.com/video/5678931", + "Width": 200, + "Height": 250 + }, + "Subtitle" : false, + "Latitude": 46.2051, + "Longitude": 6.0723 + } + } + '' +== + { Video = + { Title = "The Penguin Chronicles"; + Width = 1920; + Height = 1080; + EmbeddedData = [ 3.14159 23493 null true false (0-10) ]; + Thumb = + { Url = "http://www.example.com/video/5678931"; + Width = 200; + Height = 250; + }; + Subtitle = false; + Latitude = 46.2051; + Longitude = 6.0723; + }; + } diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-hash.exp b/tvix/eval/src/tests/nix_tests/eval-okay-hash.exp new file mode 100644 index 000000000000..e69de29bb2d1 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-hash.exp diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-if.exp b/tvix/eval/src/tests/nix_tests/eval-okay-if.exp new file mode 100644 index 000000000000..00750edc07d6 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-if.exp @@ -0,0 +1 @@ +3 diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-if.nix b/tvix/eval/src/tests/nix_tests/eval-okay-if.nix new file mode 100644 index 000000000000..23e4c74d5016 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-if.nix @@ -0,0 +1 @@ +if "foo" != "f" + "oo" then 1 else if false then 2 else 3 diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-ind-string.exp b/tvix/eval/src/tests/nix_tests/eval-okay-ind-string.exp new file mode 100644 index 000000000000..7862331fa551 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-ind-string.exp @@ -0,0 +1 @@ +"This is an indented multi-line string\nliteral. An amount of whitespace at\nthe start of each line matching the minimum\nindentation of all lines in the string\nliteral together will be removed. Thus,\nin this case four spaces will be\nstripped from each line, even though\n THIS LINE is indented six spaces.\n\nAlso, empty lines don't count in the\ndetermination of the indentation level (the\nprevious empty line has indentation 0, but\nit doesn't matter).\nIf the string starts with whitespace\n followed by a newline, it's stripped, but\n that's not the case here. Two spaces are\n stripped because of the \" \" at the start. \nThis line is indented\na bit further.\nAnti-quotations, like so, are\nalso allowed.\n The \\ is not special here.\n' can be followed by any character except another ', e.g. 'x'.\nLikewise for $, e.g. $$ or $varName.\nBut ' followed by ' is special, as is $ followed by {.\nIf you want them, use anti-quotations: '', \${.\n Tabs are not interpreted as whitespace (since we can't guess\n what tab settings are intended), so don't use them.\n\tThis line starts with a space and a tab, so only one\n space will be stripped from each line.\nAlso note that if the last line (just before the closing ' ')\nconsists only of whitespace, it's ignored. But here there is\nsome non-whitespace stuff, so the line isn't removed. \nThis shows a hacky way to preserve an empty line after the start.\nBut there's no reason to do so: you could just repeat the empty\nline.\n Similarly you can force an indentation level,\n in this case to 2 spaces. 
This works because the anti-quote\n is significant (not whitespace).\nstart on network-interfaces\n\nstart script\n\n rm -f /var/run/opengl-driver\n ln -sf 123 /var/run/opengl-driver\n\n rm -f /var/log/slim.log\n \nend script\n\nenv SLIM_CFGFILE=abc\nenv SLIM_THEMESDIR=def\nenv FONTCONFIG_FILE=/etc/fonts/fonts.conf \t\t\t\t# !!! cleanup\nenv XKB_BINDIR=foo/bin \t\t\t\t# Needed for the Xkb extension.\nenv LD_LIBRARY_PATH=libX11/lib:libXext/lib:/usr/lib/ # related to xorg-sys-opengl - needed to load libglx for (AI)GLX support (for compiz)\n\nenv XORG_DRI_DRIVER_PATH=nvidiaDrivers/X11R6/lib/modules/drivers/ \n\nexec slim/bin/slim\nEscaping of ' followed by ': ''\nEscaping of $ followed by {: \${\nAnd finally to interpret \\n etc. as in a string: \n, \r, \t.\nfoo\n'bla'\nbar\ncut -d $'\\t' -f 1\nending dollar $$\n" diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-ind-string.nix b/tvix/eval/src/tests/nix_tests/eval-okay-ind-string.nix new file mode 100644 index 000000000000..1669dc0648ea --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-ind-string.nix @@ -0,0 +1,128 @@ +let + + s1 = '' + This is an indented multi-line string + literal. An amount of whitespace at + the start of each line matching the minimum + indentation of all lines in the string + literal together will be removed. Thus, + in this case four spaces will be + stripped from each line, even though + THIS LINE is indented six spaces. + + Also, empty lines don't count in the + determination of the indentation level (the + previous empty line has indentation 0, but + it doesn't matter). + ''; + + s2 = '' If the string starts with whitespace + followed by a newline, it's stripped, but + that's not the case here. Two spaces are + stripped because of the " " at the start. + ''; + + s3 = '' + This line is indented + a bit further. + ''; # indentation of last line doesn't count if it's empty + + s4 = '' + Anti-quotations, like ${if true then "so" else "not so"}, are + also allowed. + ''; + + s5 = '' + The \ is not special here. + ' can be followed by any character except another ', e.g. 'x'. + Likewise for $, e.g. $$ or $varName. + But ' followed by ' is special, as is $ followed by {. + If you want them, use anti-quotations: ${"''"}, ${"\${"}. + ''; + + s6 = '' + Tabs are not interpreted as whitespace (since we can't guess + what tab settings are intended), so don't use them. + This line starts with a space and a tab, so only one + space will be stripped from each line. + ''; + + s7 = '' + Also note that if the last line (just before the closing ' ') + consists only of whitespace, it's ignored. But here there is + some non-whitespace stuff, so the line isn't removed. ''; + + s8 = '' ${""} + This shows a hacky way to preserve an empty line after the start. + But there's no reason to do so: you could just repeat the empty + line. + ''; + + s9 = '' + ${""} Similarly you can force an indentation level, + in this case to 2 spaces. This works because the anti-quote + is significant (not whitespace). + ''; + + s10 = '' + ''; + + s11 = ''''; + + s12 = '' ''; + + s13 = '' + start on network-interfaces + + start script + + rm -f /var/run/opengl-driver + ${if true + then "ln -sf 123 /var/run/opengl-driver" + else if true + then "ln -sf 456 /var/run/opengl-driver" + else "" + } + + rm -f /var/log/slim.log + + end script + + env SLIM_CFGFILE=${"abc"} + env SLIM_THEMESDIR=${"def"} + env FONTCONFIG_FILE=/etc/fonts/fonts.conf # !!! cleanup + env XKB_BINDIR=${"foo"}/bin # Needed for the Xkb extension. 
+ env LD_LIBRARY_PATH=${"libX11"}/lib:${"libXext"}/lib:/usr/lib/ # related to xorg-sys-opengl - needed to load libglx for (AI)GLX support (for compiz) + + ${if true + then "env XORG_DRI_DRIVER_PATH=${"nvidiaDrivers"}/X11R6/lib/modules/drivers/" + else if true + then "env XORG_DRI_DRIVER_PATH=${"mesa"}/lib/modules/dri" + else "" + } + + exec ${"slim"}/bin/slim + ''; + + s14 = '' + Escaping of ' followed by ': ''' + Escaping of $ followed by {: ''${ + And finally to interpret \n etc. as in a string: ''\n, ''\r, ''\t. + ''; + + # Regression test: antiquotation in '${x}' should work, but didn't. + s15 = let x = "bla"; in '' + foo + '${x}' + bar + ''; + + # Regression test: accept $'. + s16 = '' + cut -d $'\t' -f 1 + ''; + + # Accept dollars at end of strings + s17 = ''ending dollar $'' + ''$'' + "\n"; + +in s1 + s2 + s3 + s4 + s5 + s6 + s7 + s8 + s9 + s10 + s11 + s12 + s13 + s14 + s15 + s16 + s17 diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-let.exp b/tvix/eval/src/tests/nix_tests/eval-okay-let.exp new file mode 100644 index 000000000000..14e24d419005 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-let.exp @@ -0,0 +1 @@ +"foobar" diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-let.nix b/tvix/eval/src/tests/nix_tests/eval-okay-let.nix new file mode 100644 index 000000000000..fe118c5282e3 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-let.nix @@ -0,0 +1,5 @@ +let { + x = "foo"; + y = "bar"; + body = x + y; +} diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-list.exp b/tvix/eval/src/tests/nix_tests/eval-okay-list.exp new file mode 100644 index 000000000000..f784f26d83f4 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-list.exp @@ -0,0 +1 @@ +"foobarblatest" diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-list.nix b/tvix/eval/src/tests/nix_tests/eval-okay-list.nix new file mode 100644 index 000000000000..d433bcf908ba --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-list.nix @@ -0,0 +1,7 @@ +with import ./lib.nix; + +let { + + body = concat ["foo" "bar" "bla" "test"]; + +} \ No newline at end of file diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-listtoattrs.exp b/tvix/eval/src/tests/nix_tests/eval-okay-listtoattrs.exp new file mode 100644 index 000000000000..74abef7bc6ed --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-listtoattrs.exp @@ -0,0 +1 @@ +"AAbar" diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-listtoattrs.nix b/tvix/eval/src/tests/nix_tests/eval-okay-listtoattrs.nix new file mode 100644 index 000000000000..4186e029b538 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-listtoattrs.nix @@ -0,0 +1,11 @@ +# this test shows how to use listToAttrs and that evaluation is still lazy (throw isn't called) +with import ./lib.nix; + +let + asi = name: value : { inherit name value; }; + list = [ ( asi "a" "A" ) ( asi "b" "B" ) ]; + a = builtins.listToAttrs list; + b = builtins.listToAttrs ( list ++ list ); + r = builtins.listToAttrs [ (asi "result" [ a b ]) ( asi "throw" (throw "this should not be thrown")) ]; + x = builtins.listToAttrs [ (asi "foo" "bar") (asi "foo" "bla") ]; +in concat (map (x: x.a) r.result) + x.foo diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-logic.exp b/tvix/eval/src/tests/nix_tests/eval-okay-logic.exp new file mode 100644 index 000000000000..d00491fd7e5b --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-logic.exp @@ -0,0 +1 @@ +1 diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-logic.nix b/tvix/eval/src/tests/nix_tests/eval-okay-logic.nix new file mode 
100644 index 000000000000..fbb12794401f --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-logic.nix @@ -0,0 +1 @@ +assert !false && (true || false) -> true; 1 diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-map.exp b/tvix/eval/src/tests/nix_tests/eval-okay-map.exp new file mode 100644 index 000000000000..dbb64f717b96 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-map.exp @@ -0,0 +1 @@ +"foobarblabarxyzzybar" diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-map.nix b/tvix/eval/src/tests/nix_tests/eval-okay-map.nix new file mode 100644 index 000000000000..a76c1d811454 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-map.nix @@ -0,0 +1,3 @@ +with import ./lib.nix; + +concat (map (x: x + "bar") [ "foo" "bla" "xyzzy" ]) \ No newline at end of file diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-mapattrs.exp b/tvix/eval/src/tests/nix_tests/eval-okay-mapattrs.exp new file mode 100644 index 000000000000..3f113f17bab1 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-mapattrs.exp @@ -0,0 +1 @@ +{ x = "x-foo"; y = "y-bar"; } diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-mapattrs.nix b/tvix/eval/src/tests/nix_tests/eval-okay-mapattrs.nix new file mode 100644 index 000000000000..f075b6275e5a --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-mapattrs.nix @@ -0,0 +1,3 @@ +with import ./lib.nix; + +builtins.mapAttrs (name: value: name + "-" + value) { x = "foo"; y = "bar"; } diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-nested-with.exp b/tvix/eval/src/tests/nix_tests/eval-okay-nested-with.exp new file mode 100644 index 000000000000..0cfbf08886fc --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-nested-with.exp @@ -0,0 +1 @@ +2 diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-nested-with.nix b/tvix/eval/src/tests/nix_tests/eval-okay-nested-with.nix new file mode 100644 index 000000000000..ba9d79aa79b1 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-nested-with.nix @@ -0,0 +1,3 @@ +with { x = 1; }; +with { x = 2; }; +x diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-new-let.exp b/tvix/eval/src/tests/nix_tests/eval-okay-new-let.exp new file mode 100644 index 000000000000..f98b388071c2 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-new-let.exp @@ -0,0 +1 @@ +"xyzzyfoobar" diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-new-let.nix b/tvix/eval/src/tests/nix_tests/eval-okay-new-let.nix new file mode 100644 index 000000000000..738123141508 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-new-let.nix @@ -0,0 +1,14 @@ +let + + f = z: + + let + x = "foo"; + y = "bar"; + body = 1; # compat test + in + z + x + y; + + arg = "xyzzy"; + +in f arg diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-null-dynamic-attrs.exp b/tvix/eval/src/tests/nix_tests/eval-okay-null-dynamic-attrs.exp new file mode 100644 index 000000000000..27ba77ddaf61 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-null-dynamic-attrs.exp @@ -0,0 +1 @@ +true diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-null-dynamic-attrs.nix b/tvix/eval/src/tests/nix_tests/eval-okay-null-dynamic-attrs.nix new file mode 100644 index 000000000000..b060c0bc9850 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-null-dynamic-attrs.nix @@ -0,0 +1 @@ +{ ${null} = true; } == {} diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-partition.exp b/tvix/eval/src/tests/nix_tests/eval-okay-partition.exp new file mode 100644 index 000000000000..cd8b8b020c05 --- /dev/null +++ 
b/tvix/eval/src/tests/nix_tests/eval-okay-partition.exp @@ -0,0 +1 @@ +{ right = [ 0 2 4 6 8 10 100 102 104 106 108 110 ]; wrong = [ 1 3 5 7 9 101 103 105 107 109 ]; } diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-partition.nix b/tvix/eval/src/tests/nix_tests/eval-okay-partition.nix new file mode 100644 index 000000000000..846d2ce49486 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-partition.nix @@ -0,0 +1,5 @@ +with import ./lib.nix; + +builtins.partition + (x: x / 2 * 2 == x) + (builtins.concatLists [ (range 0 10) (range 100 110) ]) diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-pathexists.exp b/tvix/eval/src/tests/nix_tests/eval-okay-pathexists.exp new file mode 100644 index 000000000000..27ba77ddaf61 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-pathexists.exp @@ -0,0 +1 @@ +true diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-pathexists.nix b/tvix/eval/src/tests/nix_tests/eval-okay-pathexists.nix new file mode 100644 index 000000000000..50c28ee0cd30 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-pathexists.nix @@ -0,0 +1,5 @@ +builtins.pathExists (builtins.toPath ./lib.nix) +&& builtins.pathExists (builtins.toPath (builtins.toString ./lib.nix)) +&& !builtins.pathExists (builtins.toPath (builtins.toString ./bla.nix)) +&& builtins.pathExists ./lib.nix +&& !builtins.pathExists ./bla.nix diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-patterns.exp b/tvix/eval/src/tests/nix_tests/eval-okay-patterns.exp new file mode 100644 index 000000000000..a4304010fe80 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-patterns.exp @@ -0,0 +1 @@ +"abcxyzDDDDEFijk" diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-patterns.nix b/tvix/eval/src/tests/nix_tests/eval-okay-patterns.nix new file mode 100644 index 000000000000..96fd25a01517 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-patterns.nix @@ -0,0 +1,16 @@ +let + + f = args@{x, y, z}: x + args.y + z; + + g = {x, y, z}@args: f args; + + h = {x ? "d", y ? x, z ? 
args.x}@args: x + y + z; + + j = {x, y, z, ...}: x + y + z; + +in + f {x = "a"; y = "b"; z = "c";} + + g {x = "x"; y = "y"; z = "z";} + + h {x = "D";} + + h {x = "D"; y = "E"; z = "F";} + + j {x = "i"; y = "j"; z = "k"; bla = "bla"; foo = "bar";} diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-readfile.exp b/tvix/eval/src/tests/nix_tests/eval-okay-readfile.exp new file mode 100644 index 000000000000..a2c87d0c439f --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-readfile.exp @@ -0,0 +1 @@ +"builtins.readFile ./eval-okay-readfile.nix\n" diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-readfile.nix b/tvix/eval/src/tests/nix_tests/eval-okay-readfile.nix new file mode 100644 index 000000000000..82f7cb17435a --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-readfile.nix @@ -0,0 +1 @@ +builtins.readFile ./eval-okay-readfile.nix diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-redefine-builtin.exp b/tvix/eval/src/tests/nix_tests/eval-okay-redefine-builtin.exp new file mode 100644 index 000000000000..c508d5366f70 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-redefine-builtin.exp @@ -0,0 +1 @@ +false diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-redefine-builtin.nix b/tvix/eval/src/tests/nix_tests/eval-okay-redefine-builtin.nix new file mode 100644 index 000000000000..df9fc3f37d22 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-redefine-builtin.nix @@ -0,0 +1,3 @@ +let + throw = abort "Error!"; +in (builtins.tryEval <foobaz>).success diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-regex-match.exp b/tvix/eval/src/tests/nix_tests/eval-okay-regex-match.exp new file mode 100644 index 000000000000..27ba77ddaf61 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-regex-match.exp @@ -0,0 +1 @@ +true diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-regex-match.nix b/tvix/eval/src/tests/nix_tests/eval-okay-regex-match.nix new file mode 100644 index 000000000000..273e2590713e --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-regex-match.nix @@ -0,0 +1,29 @@ +with builtins; + +let + + matches = pat: s: match pat s != null; + + splitFN = match "((.*)/)?([^/]*)\\.(nix|cc)"; + +in + +assert matches "foobar" "foobar"; +assert matches "fo*" "f"; +assert !matches "fo+" "f"; +assert matches "fo*" "fo"; +assert matches "fo*" "foo"; +assert matches "fo+" "foo"; +assert matches "fo{1,2}" "foo"; +assert !matches "fo{1,2}" "fooo"; +assert !matches "fo*" "foobar"; +assert matches "[[:space:]]+([^[:space:]]+)[[:space:]]+" " foo "; +assert !matches "[[:space:]]+([[:upper:]]+)[[:space:]]+" " foo "; + +assert match "(.*)\\.nix" "foobar.nix" == [ "foobar" ]; +assert match "[[:space:]]+([[:upper:]]+)[[:space:]]+" " FOO " == [ "FOO" ]; + +assert splitFN "/path/to/foobar.nix" == [ "/path/to/" "/path/to" "foobar" "nix" ]; +assert splitFN "foobar.cc" == [ null null "foobar" "cc" ]; + +true diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-regex-split.exp b/tvix/eval/src/tests/nix_tests/eval-okay-regex-split.exp new file mode 100644 index 000000000000..27ba77ddaf61 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-regex-split.exp @@ -0,0 +1 @@ +true diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-regex-split.nix b/tvix/eval/src/tests/nix_tests/eval-okay-regex-split.nix new file mode 100644 index 000000000000..0073e057787d --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-regex-split.nix @@ -0,0 +1,48 @@ +with builtins; + +# Non capturing regex returns empty lists +assert split "foobar" "foobar" == ["" [] ""]; 
+assert split "fo*" "f" == ["" [] ""]; +assert split "fo+" "f" == ["f"]; +assert split "fo*" "fo" == ["" [] ""]; +assert split "fo*" "foo" == ["" [] ""]; +assert split "fo+" "foo" == ["" [] ""]; +assert split "fo{1,2}" "foo" == ["" [] ""]; +assert split "fo{1,2}" "fooo" == ["" [] "o"]; +assert split "fo*" "foobar" == ["" [] "bar"]; + +# Capturing regex returns a list of sub-matches +assert split "(fo*)" "f" == ["" ["f"] ""]; +assert split "(fo+)" "f" == ["f"]; +assert split "(fo*)" "fo" == ["" ["fo"] ""]; +assert split "(f)(o*)" "f" == ["" ["f" ""] ""]; +assert split "(f)(o*)" "foo" == ["" ["f" "oo"] ""]; +assert split "(fo+)" "foo" == ["" ["foo"] ""]; +assert split "(fo{1,2})" "foo" == ["" ["foo"] ""]; +assert split "(fo{1,2})" "fooo" == ["" ["foo"] "o"]; +assert split "(fo*)" "foobar" == ["" ["foo"] "bar"]; + +# Matches are greedy. +assert split "(o+)" "oooofoooo" == ["" ["oooo"] "f" ["oooo"] ""]; + +# Matches multiple times. +assert split "(b)" "foobarbaz" == ["foo" ["b"] "ar" ["b"] "az"]; + +# Split large strings containing newlines. null are inserted when a +# pattern within the current did not match anything. +assert split "[[:space:]]+|([',.!?])" '' + Nix Rocks! + That's why I use it. +'' == [ + "Nix" [ null ] "Rocks" ["!"] "" [ null ] + "That" ["'"] "s" [ null ] "why" [ null ] "I" [ null ] "use" [ null ] "it" ["."] "" [ null ] + "" +]; + +# Documentation examples +assert split "(a)b" "abc" == [ "" [ "a" ] "c" ]; +assert split "([ac])" "abc" == [ "" [ "a" ] "b" [ "c" ] "" ]; +assert split "(a)|(c)" "abc" == [ "" [ "a" null ] "b" [ null "c" ] "" ]; +assert split "([[:upper:]]+)" " FOO " == [ " " [ "FOO" ] " " ]; + +true diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-regression-20220122.exp b/tvix/eval/src/tests/nix_tests/eval-okay-regression-20220122.exp new file mode 100644 index 000000000000..00750edc07d6 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-regression-20220122.exp @@ -0,0 +1 @@ +3 diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-regression-20220122.nix b/tvix/eval/src/tests/nix_tests/eval-okay-regression-20220122.nix new file mode 100644 index 000000000000..694e9a13b7c5 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-regression-20220122.nix @@ -0,0 +1 @@ +((_: _) 1) + ((__: __) 2) diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-regression-20220125.exp b/tvix/eval/src/tests/nix_tests/eval-okay-regression-20220125.exp new file mode 100644 index 000000000000..00750edc07d6 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-regression-20220125.exp @@ -0,0 +1 @@ +3 diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-regression-20220125.nix b/tvix/eval/src/tests/nix_tests/eval-okay-regression-20220125.nix new file mode 100644 index 000000000000..48550237394a --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-regression-20220125.nix @@ -0,0 +1,2 @@ +((__curPosFoo: __curPosFoo) 1) + ((__curPosBar: __curPosBar) 2) + diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-remove.exp b/tvix/eval/src/tests/nix_tests/eval-okay-remove.exp new file mode 100644 index 000000000000..8d38505c1686 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-remove.exp @@ -0,0 +1 @@ +456 diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-remove.nix b/tvix/eval/src/tests/nix_tests/eval-okay-remove.nix new file mode 100644 index 000000000000..4ad5ba897fa7 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-remove.nix @@ -0,0 +1,5 @@ +let { + attrs = {x = 123; y = 456;}; + + body = (removeAttrs attrs ["x"]).y; +} \ No newline at 
end of file diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-replacestrings.exp b/tvix/eval/src/tests/nix_tests/eval-okay-replacestrings.exp new file mode 100644 index 000000000000..72e8274d8c58 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-replacestrings.exp @@ -0,0 +1 @@ +[ "faabar" "fbar" "fubar" "faboor" "fubar" "XaXbXcX" "X" "a_b" ] diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-replacestrings.nix b/tvix/eval/src/tests/nix_tests/eval-okay-replacestrings.nix new file mode 100644 index 000000000000..bd8031fc004e --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-replacestrings.nix @@ -0,0 +1,11 @@ +with builtins; + +[ (replaceStrings ["o"] ["a"] "foobar") + (replaceStrings ["o"] [""] "foobar") + (replaceStrings ["oo"] ["u"] "foobar") + (replaceStrings ["oo" "a"] ["a" "oo"] "foobar") + (replaceStrings ["oo" "oo"] ["u" "i"] "foobar") + (replaceStrings [""] ["X"] "abc") + (replaceStrings [""] ["X"] "") + (replaceStrings ["-"] ["_"] "a-b") +] diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-scope-1.exp b/tvix/eval/src/tests/nix_tests/eval-okay-scope-1.exp new file mode 100644 index 000000000000..00750edc07d6 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-scope-1.exp @@ -0,0 +1 @@ +3 diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-scope-1.nix b/tvix/eval/src/tests/nix_tests/eval-okay-scope-1.nix new file mode 100644 index 000000000000..fa38a7174e03 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-scope-1.nix @@ -0,0 +1,6 @@ +(({x}: x: + + { x = 1; + y = x; + } +) {x = 2;} 3).y diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-scope-2.exp b/tvix/eval/src/tests/nix_tests/eval-okay-scope-2.exp new file mode 100644 index 000000000000..d00491fd7e5b --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-scope-2.exp @@ -0,0 +1 @@ +1 diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-scope-2.nix b/tvix/eval/src/tests/nix_tests/eval-okay-scope-2.nix new file mode 100644 index 000000000000..eb8b02bc4994 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-scope-2.nix @@ -0,0 +1,6 @@ +((x: {x}: + rec { + x = 1; + y = x; + } +) 2 {x = 3;}).y diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-scope-3.exp b/tvix/eval/src/tests/nix_tests/eval-okay-scope-3.exp new file mode 100644 index 000000000000..b8626c4cff28 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-scope-3.exp @@ -0,0 +1 @@ +4 diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-scope-3.nix b/tvix/eval/src/tests/nix_tests/eval-okay-scope-3.nix new file mode 100644 index 000000000000..10d6bc04d830 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-scope-3.nix @@ -0,0 +1,6 @@ +((x: as: {x}: + rec { + inherit (as) x; + y = x; + } +) 2 {x = 4;} {x = 3;}).y diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-scope-4.exp b/tvix/eval/src/tests/nix_tests/eval-okay-scope-4.exp new file mode 100644 index 000000000000..00ff03a46c9b --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-scope-4.exp @@ -0,0 +1 @@ +"ccdd" diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-scope-4.nix b/tvix/eval/src/tests/nix_tests/eval-okay-scope-4.nix new file mode 100644 index 000000000000..dc8243bc8546 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-scope-4.nix @@ -0,0 +1,10 @@ +let { + + x = "a"; + y = "b"; + + f = {x ? y, y ? 
x}: x + y; + + body = f {x = "c";} + f {y = "d";}; + +} diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-scope-6.exp b/tvix/eval/src/tests/nix_tests/eval-okay-scope-6.exp new file mode 100644 index 000000000000..00ff03a46c9b --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-scope-6.exp @@ -0,0 +1 @@ +"ccdd" diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-scope-6.nix b/tvix/eval/src/tests/nix_tests/eval-okay-scope-6.nix new file mode 100644 index 000000000000..0995d4e7e7e0 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-scope-6.nix @@ -0,0 +1,7 @@ +let { + + f = {x ? y, y ? x}: x + y; + + body = f {x = "c";} + f {y = "d";}; + +} diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-scope-7.exp b/tvix/eval/src/tests/nix_tests/eval-okay-scope-7.exp new file mode 100644 index 000000000000..d00491fd7e5b --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-scope-7.exp @@ -0,0 +1 @@ +1 diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-scope-7.nix b/tvix/eval/src/tests/nix_tests/eval-okay-scope-7.nix new file mode 100644 index 000000000000..4da02968f6b7 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-scope-7.nix @@ -0,0 +1,6 @@ +rec { + inherit (x) y; + x = { + y = 1; + }; +}.y diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-seq.exp b/tvix/eval/src/tests/nix_tests/eval-okay-seq.exp new file mode 100644 index 000000000000..0cfbf08886fc --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-seq.exp @@ -0,0 +1 @@ +2 diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-seq.nix b/tvix/eval/src/tests/nix_tests/eval-okay-seq.nix new file mode 100644 index 000000000000..0a9a21c03b62 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-seq.nix @@ -0,0 +1 @@ +builtins.seq 1 2 diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-sort.exp b/tvix/eval/src/tests/nix_tests/eval-okay-sort.exp new file mode 100644 index 000000000000..899119e20e38 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-sort.exp @@ -0,0 +1 @@ +[ [ 42 77 147 249 483 526 ] [ 526 483 249 147 77 42 ] [ "bar" "fnord" "foo" "xyzzy" ] [ { key = 1; value = "foo"; } { key = 1; value = "fnord"; } { key = 2; value = "bar"; } ] [ [ ] [ ] [ 1 ] [ 1 4 ] [ 1 5 ] [ 1 6 ] [ 2 ] [ 2 3 ] [ 3 ] [ 3 ] ] ] diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-sort.nix b/tvix/eval/src/tests/nix_tests/eval-okay-sort.nix new file mode 100644 index 000000000000..50aa78e40325 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-sort.nix @@ -0,0 +1,20 @@ +with builtins; + +[ (sort lessThan [ 483 249 526 147 42 77 ]) + (sort (x: y: y < x) [ 483 249 526 147 42 77 ]) + (sort lessThan [ "foo" "bar" "xyzzy" "fnord" ]) + (sort (x: y: x.key < y.key) + [ { key = 1; value = "foo"; } { key = 2; value = "bar"; } { key = 1; value = "fnord"; } ]) + (sort lessThan [ + [ 1 6 ] + [ ] + [ 2 3 ] + [ 3 ] + [ 1 5 ] + [ 2 ] + [ 1 ] + [ ] + [ 1 4 ] + [ 3 ] + ]) +] diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-splitversion.exp b/tvix/eval/src/tests/nix_tests/eval-okay-splitversion.exp new file mode 100644 index 000000000000..153ceb8186a0 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-splitversion.exp @@ -0,0 +1 @@ +[ "1" "2" "3" ] diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-splitversion.nix b/tvix/eval/src/tests/nix_tests/eval-okay-splitversion.nix new file mode 100644 index 000000000000..9e5c99d2e7f6 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-splitversion.nix @@ -0,0 +1 @@ +builtins.splitVersion "1.2.3" diff --git 
a/tvix/eval/src/tests/nix_tests/eval-okay-string.exp b/tvix/eval/src/tests/nix_tests/eval-okay-string.exp new file mode 100644 index 000000000000..63f650f73a3a --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-string.exp @@ -0,0 +1 @@ +"foobar/a/b/c/d/foo/xyzzy/foo.txt/../foo/x/yescape: \"quote\" \n \\end\nof\nlinefoobarblaatfoo$bar$\"$\"$" diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-string.nix b/tvix/eval/src/tests/nix_tests/eval-okay-string.nix new file mode 100644 index 000000000000..47cc989ad46a --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-string.nix @@ -0,0 +1,12 @@ +"foo" + "bar" + + toString (/a/b + /c/d) + + toString (/foo/bar + "/../xyzzy/." + "/foo.txt") + + ("/../foo" + toString /x/y) + + "escape: \"quote\" \n \\" + + "end +of +line" + + "foo${if true then "b${"a" + "r"}" else "xyzzy"}blaat" + + "foo$bar" + + "$\"$\"" + + "$" diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-strings-as-attrs-names.exp b/tvix/eval/src/tests/nix_tests/eval-okay-strings-as-attrs-names.exp new file mode 100644 index 000000000000..27ba77ddaf61 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-strings-as-attrs-names.exp @@ -0,0 +1 @@ +true diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-strings-as-attrs-names.nix b/tvix/eval/src/tests/nix_tests/eval-okay-strings-as-attrs-names.nix new file mode 100644 index 000000000000..5e40928dbe31 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-strings-as-attrs-names.nix @@ -0,0 +1,20 @@ +let + + attr = { + "key 1" = "test"; + "key 2" = "caseok"; + }; + + t1 = builtins.getAttr "key 1" attr; + t2 = attr."key 2"; + t3 = attr ? "key 1"; + t4 = builtins.attrNames { inherit (attr) "key 1"; }; + + # This is permitted, but there is currently no way to reference this + # variable. 
+ "foo bar" = 1; + +in t1 == "test" + && t2 == "caseok" + && t3 == true + && t4 == ["key 1"] diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-substring.exp b/tvix/eval/src/tests/nix_tests/eval-okay-substring.exp new file mode 100644 index 000000000000..6aace04b0f57 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-substring.exp @@ -0,0 +1 @@ +"ooxfoobarybarzobaabbc" diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-substring.nix b/tvix/eval/src/tests/nix_tests/eval-okay-substring.nix new file mode 100644 index 000000000000..424af00d9b3b --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-substring.nix @@ -0,0 +1,21 @@ +with builtins; + +let + + s = "foobar"; + +in + +substring 1 2 s ++ "x" ++ substring 0 (stringLength s) s ++ "y" ++ substring 3 100 s ++ "z" ++ substring 2 (sub (stringLength s) 3) s ++ "a" ++ substring 3 0 s ++ "b" ++ substring 3 1 s ++ "c" ++ substring 5 10 "perl" diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-tail-call-1.exp b/tvix/eval/src/tests/nix_tests/eval-okay-tail-call-1.exp new file mode 100644 index 000000000000..f7393e847d34 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-tail-call-1.exp @@ -0,0 +1 @@ +100000 diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-tail-call-1.nix b/tvix/eval/src/tests/nix_tests/eval-okay-tail-call-1.nix new file mode 100644 index 000000000000..a3962ce3fdb5 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-tail-call-1.nix @@ -0,0 +1,3 @@ +let + f = n: if n == 100000 then n else f (n + 1); +in f 0 diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-tojson.exp b/tvix/eval/src/tests/nix_tests/eval-okay-tojson.exp new file mode 100644 index 000000000000..e92aae3235f2 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-tojson.exp @@ -0,0 +1 @@ +"{\"a\":123,\"b\":-456,\"c\":\"foo\",\"d\":\"foo\\n\\\"bar\\\"\",\"e\":true,\"f\":false,\"g\":[1,2,3],\"h\":[\"a\",[\"b\",{\"foo\\nbar\":{}}]],\"i\":3,\"j\":1.44,\"k\":\"foo\"}" diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-tojson.nix b/tvix/eval/src/tests/nix_tests/eval-okay-tojson.nix new file mode 100644 index 000000000000..ce67943bead5 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-tojson.nix @@ -0,0 +1,13 @@ +builtins.toJSON + { a = 123; + b = -456; + c = "foo"; + d = "foo\n\"bar\""; + e = true; + f = false; + g = [ 1 2 3 ]; + h = [ "a" [ "b" { "foo\nbar" = {}; } ] ]; + i = 1 + 2; + j = 1.44; + k = { __toString = self: self.a; a = "foo"; }; + } diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-toxml.exp b/tvix/eval/src/tests/nix_tests/eval-okay-toxml.exp new file mode 100644 index 000000000000..828220890ecd --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-toxml.exp @@ -0,0 +1 @@ +"<?xml version='1.0' encoding='utf-8'?>\n<expr>\n <attrs>\n <attr name=\"a\">\n <string value=\"s\" />\n </attr>\n </attrs>\n</expr>\n" diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-toxml.nix b/tvix/eval/src/tests/nix_tests/eval-okay-toxml.nix new file mode 100644 index 000000000000..068c97a6c1b3 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-toxml.nix @@ -0,0 +1,3 @@ +# Make sure the expected XML output is produced; in particular, make sure it +# doesn't contain source location information. 
+builtins.toXML { a = "s"; } diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-toxml2.exp b/tvix/eval/src/tests/nix_tests/eval-okay-toxml2.exp new file mode 100644 index 000000000000..634a841eb190 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-toxml2.exp @@ -0,0 +1 @@ +"<?xml version='1.0' encoding='utf-8'?>\n<expr>\n <list>\n <string value=\"ab\" />\n <int value=\"10\" />\n <attrs>\n <attr name=\"x\">\n <string value=\"x\" />\n </attr>\n <attr name=\"y\">\n <string value=\"x\" />\n </attr>\n </attrs>\n </list>\n</expr>\n" diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-toxml2.nix b/tvix/eval/src/tests/nix_tests/eval-okay-toxml2.nix new file mode 100644 index 000000000000..ff1791b30eb5 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-toxml2.nix @@ -0,0 +1 @@ +builtins.toXML [("a" + "b") 10 (rec {x = "x"; y = x;})] diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-tryeval.exp b/tvix/eval/src/tests/nix_tests/eval-okay-tryeval.exp new file mode 100644 index 000000000000..2b2e6fa711f4 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-tryeval.exp @@ -0,0 +1 @@ +{ x = { success = true; value = "x"; }; y = { success = false; value = false; }; z = { success = false; value = false; }; } diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-tryeval.nix b/tvix/eval/src/tests/nix_tests/eval-okay-tryeval.nix new file mode 100644 index 000000000000..629bc440a85a --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-tryeval.nix @@ -0,0 +1,5 @@ +{ + x = builtins.tryEval "x"; + y = builtins.tryEval (assert false; "y"); + z = builtins.tryEval (throw "bla"); +} diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-types.exp b/tvix/eval/src/tests/nix_tests/eval-okay-types.exp new file mode 100644 index 000000000000..92a15329935a --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-types.exp @@ -0,0 +1 @@ +[ true false true false true false true false true true true true true true true true true true true false true true true false "int" "bool" "string" "null" "set" "list" "lambda" "lambda" "lambda" "lambda" ] diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-types.nix b/tvix/eval/src/tests/nix_tests/eval-okay-types.nix new file mode 100644 index 000000000000..9b58be5d1dd4 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-types.nix @@ -0,0 +1,37 @@ +with builtins; + +[ (isNull null) + (isNull (x: x)) + (isFunction (x: x)) + (isFunction "fnord") + (isString ("foo" + "bar")) + (isString [ "x" ]) + (isInt (1 + 2)) + (isInt { x = 123; }) + (isInt (1 / 2)) + (isInt (1 + 1)) + (isInt (1 / 2)) + (isInt (1 * 2)) + (isInt (1 - 2)) + (isFloat (1.2)) + (isFloat (1 + 1.0)) + (isFloat (1 / 2.0)) + (isFloat (1 * 2.0)) + (isFloat (1 - 2.0)) + (isBool (true && false)) + (isBool null) + (isPath /nix/store) + (isPath ./.) 
+ (isAttrs { x = 123; }) + (isAttrs null) + (typeOf (3 * 4)) + (typeOf true) + (typeOf "xyzzy") + (typeOf null) + (typeOf { x = 456; }) + (typeOf [ 1 2 3 ]) + (typeOf (x: x)) + (typeOf ((x: y: x) 1)) + (typeOf map) + (typeOf (map (x: x))) +] diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-versions.exp b/tvix/eval/src/tests/nix_tests/eval-okay-versions.exp new file mode 100644 index 000000000000..27ba77ddaf61 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-versions.exp @@ -0,0 +1 @@ +true diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-versions.nix b/tvix/eval/src/tests/nix_tests/eval-okay-versions.nix new file mode 100644 index 000000000000..e9111f5f4331 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-versions.nix @@ -0,0 +1,43 @@ +let + + name1 = "hello-1.0.2"; + name2 = "hello"; + name3 = "915resolution-0.5.2"; + name4 = "xf86-video-i810-1.7.4"; + name5 = "name-that-ends-with-dash--1.0"; + + eq = 0; + lt = builtins.sub 0 1; + gt = 1; + + versionTest = v1: v2: expected: + let d1 = builtins.compareVersions v1 v2; + d2 = builtins.compareVersions v2 v1; + in d1 == builtins.sub 0 d2 && d1 == expected; + + tests = [ + ((builtins.parseDrvName name1).name == "hello") + ((builtins.parseDrvName name1).version == "1.0.2") + ((builtins.parseDrvName name2).name == "hello") + ((builtins.parseDrvName name2).version == "") + ((builtins.parseDrvName name3).name == "915resolution") + ((builtins.parseDrvName name3).version == "0.5.2") + ((builtins.parseDrvName name4).name == "xf86-video-i810") + ((builtins.parseDrvName name4).version == "1.7.4") + ((builtins.parseDrvName name5).name == "name-that-ends-with-dash") + ((builtins.parseDrvName name5).version == "-1.0") + (versionTest "1.0" "2.3" lt) + (versionTest "2.1" "2.3" lt) + (versionTest "2.3" "2.3" eq) + (versionTest "2.5" "2.3" gt) + (versionTest "3.1" "2.3" gt) + (versionTest "2.3.1" "2.3" gt) + (versionTest "2.3.1" "2.3a" gt) + (versionTest "2.3pre1" "2.3" lt) + (versionTest "2.3pre3" "2.3pre12" lt) + (versionTest "2.3a" "2.3c" lt) + (versionTest "2.3pre1" "2.3c" lt) + (versionTest "2.3pre1" "2.3q" lt) + ]; + +in (import ./lib.nix).and tests diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-with.exp b/tvix/eval/src/tests/nix_tests/eval-okay-with.exp new file mode 100644 index 000000000000..378c8dc80403 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-with.exp @@ -0,0 +1 @@ +"xyzzybarxyzzybar" diff --git a/tvix/eval/src/tests/nix_tests/eval-okay-with.nix b/tvix/eval/src/tests/nix_tests/eval-okay-with.nix new file mode 100644 index 000000000000..033e8d3aba57 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/eval-okay-with.nix @@ -0,0 +1,19 @@ +let { + + a = "xyzzy"; + + as = { + a = "foo"; + b = "bar"; + }; + + bs = { + a = "bar"; + }; + + x = with as; a + b; + + y = with as; with bs; a + b; + + body = x + y; +} diff --git a/tvix/eval/src/tests/nix_tests/imported.nix b/tvix/eval/src/tests/nix_tests/imported.nix new file mode 100644 index 000000000000..fb39ee4efacd --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/imported.nix @@ -0,0 +1,3 @@ +# The function ‘range’ comes from lib.nix and was added to the lexical +# scope by scopedImport. 
+range 1 5 ++ import ./imported2.nix diff --git a/tvix/eval/src/tests/nix_tests/imported2.nix b/tvix/eval/src/tests/nix_tests/imported2.nix new file mode 100644 index 000000000000..6d0a2992b732 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/imported2.nix @@ -0,0 +1 @@ +range 6 10 diff --git a/tvix/eval/src/tests/nix_tests/lib.nix b/tvix/eval/src/tests/nix_tests/lib.nix new file mode 100644 index 000000000000..028a538314b7 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/lib.nix @@ -0,0 +1,61 @@ +with builtins; + +rec { + + fold = op: nul: list: + if list == [] + then nul + else op (head list) (fold op nul (tail list)); + + concat = + fold (x: y: x + y) ""; + + and = fold (x: y: x && y) true; + + flatten = x: + if isList x + then fold (x: y: (flatten x) ++ y) [] x + else [x]; + + sum = foldl' (x: y: add x y) 0; + + hasSuffix = ext: fileName: + let lenFileName = stringLength fileName; + lenExt = stringLength ext; + in !(lessThan lenFileName lenExt) && + substring (sub lenFileName lenExt) lenFileName fileName == ext; + + # Split a list at the given position. + splitAt = pos: list: + if pos == 0 then {first = []; second = list;} else + if list == [] then {first = []; second = [];} else + let res = splitAt (sub pos 1) (tail list); + in {first = [(head list)] ++ res.first; second = res.second;}; + + # Stable merge sort. + sortBy = comp: list: + if lessThan 1 (length list) + then + let + split = splitAt (div (length list) 2) list; + first = sortBy comp split.first; + second = sortBy comp split.second; + in mergeLists comp first second + else list; + + mergeLists = comp: list1: list2: + if list1 == [] then list2 else + if list2 == [] then list1 else + if comp (head list2) (head list1) then [(head list2)] ++ mergeLists comp list1 (tail list2) else + [(head list1)] ++ mergeLists comp (tail list1) list2; + + id = x: x; + + const = x: y: x; + + range = first: last: + if first > last + then [] + else genList (n: first + n) (last - first + 1); + +} diff --git a/tvix/eval/src/tests/nix_tests/notyetpassing/eval-fail-bad-antiquote-2.nix b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-fail-bad-antiquote-2.nix new file mode 100644 index 000000000000..3745235ce95e --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-fail-bad-antiquote-2.nix @@ -0,0 +1 @@ +"${./fnord}" diff --git a/tvix/eval/src/tests/nix_tests/notyetpassing/eval-fail-nonexist-path.nix b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-fail-nonexist-path.nix new file mode 100644 index 000000000000..f2f08107b516 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-fail-nonexist-path.nix @@ -0,0 +1,4 @@ +# This must fail to evaluate, since ./fnord doesn't exist. If it did +# exist, it would produce "/nix/store/<hash>-fnord/xyzzy" (with an +# appropriate context). +"${./fnord}/xyzzy" diff --git a/tvix/eval/src/tests/nix_tests/notyetpassing/eval-fail-scope-5.nix b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-fail-scope-5.nix new file mode 100644 index 000000000000..f89a65a99be3 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-fail-scope-5.nix @@ -0,0 +1,10 @@ +let { + + x = "a"; + y = "b"; + + f = {x ? y, y ? 
x}: x + y; + + body = f {}; + +} diff --git a/tvix/eval/src/tests/nix_tests/notyetpassing/eval-fail-undeclared-arg.nix b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-fail-undeclared-arg.nix new file mode 100644 index 000000000000..cafdf1636272 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-fail-undeclared-arg.nix @@ -0,0 +1 @@ +({x, z}: x + z) {x = "foo"; y = "bla"; z = "bar";} diff --git a/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-attrs6.exp b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-attrs6.exp new file mode 100644 index 000000000000..b46938032e73 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-attrs6.exp @@ -0,0 +1 @@ +{ __overrides = { bar = "qux"; }; bar = "qux"; foo = "bar"; } diff --git a/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-attrs6.nix b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-attrs6.nix new file mode 100644 index 000000000000..2e5c85483be6 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-attrs6.nix @@ -0,0 +1,4 @@ +rec { + "${"foo"}" = "bar"; + __overrides = { bar = "qux"; }; +} diff --git a/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-autoargs.exp b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-autoargs.exp new file mode 100644 index 000000000000..7a8391786a09 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-autoargs.exp @@ -0,0 +1 @@ +"xyzzy!xyzzy!foobar" diff --git a/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-autoargs.flags b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-autoargs.flags new file mode 100644 index 000000000000..217c7a5ae291 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-autoargs.flags @@ -0,0 +1 @@ +--arg lib import(nix_tests/lib.nix) --argstr xyzzy xyzzy! -A result diff --git a/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-autoargs.nix b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-autoargs.nix new file mode 100644 index 000000000000..815f51b1d67a --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-autoargs.nix @@ -0,0 +1,15 @@ +let + + foobar = "foobar"; + +in + +{ xyzzy2 ? xyzzy # mutually recursive args +, xyzzy ? "blaat" # will be overridden by --argstr +, fb ? 
foobar +, lib # will be set by --arg +}: + +{ + result = lib.concat [xyzzy xyzzy2 fb]; +} diff --git a/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-closure.exp.xml b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-closure.exp.xml new file mode 100644 index 000000000000..dffc03a99891 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-closure.exp.xml @@ -0,0 +1,343 @@ +<?xml version='1.0' encoding='utf-8'?> +<expr> + <list> + <attrs> + <attr name="foo"> + <bool value="true" /> + </attr> + <attr name="key"> + <int value="-13" /> + </attr> + </attrs> + <attrs> + <attr name="foo"> + <bool value="true" /> + </attr> + <attr name="key"> + <int value="-12" /> + </attr> + </attrs> + <attrs> + <attr name="foo"> + <bool value="true" /> + </attr> + <attr name="key"> + <int value="-11" /> + </attr> + </attrs> + <attrs> + <attr name="foo"> + <bool value="true" /> + </attr> + <attr name="key"> + <int value="-9" /> + </attr> + </attrs> + <attrs> + <attr name="foo"> + <bool value="true" /> + </attr> + <attr name="key"> + <int value="-8" /> + </attr> + </attrs> + <attrs> + <attr name="foo"> + <bool value="true" /> + </attr> + <attr name="key"> + <int value="-7" /> + </attr> + </attrs> + <attrs> + <attr name="foo"> + <bool value="true" /> + </attr> + <attr name="key"> + <int value="-5" /> + </attr> + </attrs> + <attrs> + <attr name="foo"> + <bool value="true" /> + </attr> + <attr name="key"> + <int value="-4" /> + </attr> + </attrs> + <attrs> + <attr name="foo"> + <bool value="true" /> + </attr> + <attr name="key"> + <int value="-3" /> + </attr> + </attrs> + <attrs> + <attr name="key"> + <int value="-1" /> + </attr> + </attrs> + <attrs> + <attr name="foo"> + <bool value="true" /> + </attr> + <attr name="key"> + <int value="0" /> + </attr> + </attrs> + <attrs> + <attr name="foo"> + <bool value="true" /> + </attr> + <attr name="key"> + <int value="1" /> + </attr> + </attrs> + <attrs> + <attr name="foo"> + <bool value="true" /> + </attr> + <attr name="key"> + <int value="2" /> + </attr> + </attrs> + <attrs> + <attr name="foo"> + <bool value="true" /> + </attr> + <attr name="key"> + <int value="4" /> + </attr> + </attrs> + <attrs> + <attr name="foo"> + <bool value="true" /> + </attr> + <attr name="key"> + <int value="5" /> + </attr> + </attrs> + <attrs> + <attr name="foo"> + <bool value="true" /> + </attr> + <attr name="key"> + <int value="6" /> + </attr> + </attrs> + <attrs> + <attr name="key"> + <int value="8" /> + </attr> + </attrs> + <attrs> + <attr name="foo"> + <bool value="true" /> + </attr> + <attr name="key"> + <int value="9" /> + </attr> + </attrs> + <attrs> + <attr name="foo"> + <bool value="true" /> + </attr> + <attr name="key"> + <int value="10" /> + </attr> + </attrs> + <attrs> + <attr name="foo"> + <bool value="true" /> + </attr> + <attr name="key"> + <int value="13" /> + </attr> + </attrs> + <attrs> + <attr name="foo"> + <bool value="true" /> + </attr> + <attr name="key"> + <int value="14" /> + </attr> + </attrs> + <attrs> + <attr name="foo"> + <bool value="true" /> + </attr> + <attr name="key"> + <int value="15" /> + </attr> + </attrs> + <attrs> + <attr name="key"> + <int value="17" /> + </attr> + </attrs> + <attrs> + <attr name="foo"> + <bool value="true" /> + </attr> + <attr name="key"> + <int value="18" /> + </attr> + </attrs> + <attrs> + <attr name="foo"> + <bool value="true" /> + </attr> + <attr name="key"> + <int value="19" /> + </attr> + </attrs> + <attrs> + <attr name="foo"> + <bool value="true" /> + </attr> + <attr name="key"> + <int value="22" /> + 
</attr> + </attrs> + <attrs> + <attr name="foo"> + <bool value="true" /> + </attr> + <attr name="key"> + <int value="23" /> + </attr> + </attrs> + <attrs> + <attr name="key"> + <int value="26" /> + </attr> + </attrs> + <attrs> + <attr name="foo"> + <bool value="true" /> + </attr> + <attr name="key"> + <int value="27" /> + </attr> + </attrs> + <attrs> + <attr name="foo"> + <bool value="true" /> + </attr> + <attr name="key"> + <int value="28" /> + </attr> + </attrs> + <attrs> + <attr name="foo"> + <bool value="true" /> + </attr> + <attr name="key"> + <int value="31" /> + </attr> + </attrs> + <attrs> + <attr name="foo"> + <bool value="true" /> + </attr> + <attr name="key"> + <int value="32" /> + </attr> + </attrs> + <attrs> + <attr name="key"> + <int value="35" /> + </attr> + </attrs> + <attrs> + <attr name="foo"> + <bool value="true" /> + </attr> + <attr name="key"> + <int value="36" /> + </attr> + </attrs> + <attrs> + <attr name="foo"> + <bool value="true" /> + </attr> + <attr name="key"> + <int value="40" /> + </attr> + </attrs> + <attrs> + <attr name="foo"> + <bool value="true" /> + </attr> + <attr name="key"> + <int value="41" /> + </attr> + </attrs> + <attrs> + <attr name="key"> + <int value="44" /> + </attr> + </attrs> + <attrs> + <attr name="foo"> + <bool value="true" /> + </attr> + <attr name="key"> + <int value="45" /> + </attr> + </attrs> + <attrs> + <attr name="foo"> + <bool value="true" /> + </attr> + <attr name="key"> + <int value="49" /> + </attr> + </attrs> + <attrs> + <attr name="key"> + <int value="53" /> + </attr> + </attrs> + <attrs> + <attr name="foo"> + <bool value="true" /> + </attr> + <attr name="key"> + <int value="54" /> + </attr> + </attrs> + <attrs> + <attr name="foo"> + <bool value="true" /> + </attr> + <attr name="key"> + <int value="58" /> + </attr> + </attrs> + <attrs> + <attr name="key"> + <int value="62" /> + </attr> + </attrs> + <attrs> + <attr name="foo"> + <bool value="true" /> + </attr> + <attr name="key"> + <int value="67" /> + </attr> + </attrs> + <attrs> + <attr name="key"> + <int value="71" /> + </attr> + </attrs> + <attrs> + <attr name="key"> + <int value="80" /> + </attr> + </attrs> + </list> +</expr> diff --git a/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-context-introspection.exp b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-context-introspection.exp new file mode 100644 index 000000000000..27ba77ddaf61 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-context-introspection.exp @@ -0,0 +1 @@ +true diff --git a/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-context-introspection.nix b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-context-introspection.nix new file mode 100644 index 000000000000..43178bd2eef9 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-context-introspection.nix @@ -0,0 +1,24 @@ +let + drv = derivation { + name = "fail"; + builder = "/bin/false"; + system = "x86_64-linux"; + outputs = [ "out" "foo" ]; + }; + + path = "${./eval-okay-context-introspection.nix}"; + + desired-context = { + "${builtins.unsafeDiscardStringContext path}" = { + path = true; + }; + "${builtins.unsafeDiscardStringContext drv.drvPath}" = { + outputs = [ "foo" "out" ]; + allOutputs = true; + }; + }; + + legit-context = builtins.getContext "${path}${drv.outPath}${drv.foo.outPath}${drv.drvPath}"; + + constructed-context = builtins.getContext (builtins.appendContext "" desired-context); +in legit-context == constructed-context diff --git 
a/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-context.exp b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-context.exp new file mode 100644 index 000000000000..2f535bdbc454 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-context.exp @@ -0,0 +1 @@ +"foo eval-okay-context.nix bar" diff --git a/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-context.nix b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-context.nix new file mode 100644 index 000000000000..7b9531cfe9e1 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-context.nix @@ -0,0 +1,6 @@ +let s = "foo ${builtins.substring 33 100 (baseNameOf "${./eval-okay-context.nix}")} bar"; +in + if s != "foo eval-okay-context.nix bar" + then abort "context not discarded" + else builtins.unsafeDiscardStringContext s + diff --git a/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-curpos.exp b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-curpos.exp new file mode 100644 index 000000000000..65fd65b4d01f --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-curpos.exp @@ -0,0 +1 @@ +[ 3 7 4 9 ] diff --git a/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-curpos.nix b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-curpos.nix new file mode 100644 index 000000000000..b79553df0bd3 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-curpos.nix @@ -0,0 +1,5 @@ +# Bla +let + x = __curPos; + y = __curPos; +in [ x.line x.column y.line y.column ] diff --git a/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-delayed-with.exp b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-delayed-with.exp new file mode 100644 index 000000000000..8e7c61ab8e77 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-delayed-with.exp @@ -0,0 +1 @@ +"b-overridden b-overridden a" diff --git a/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-delayed-with.nix b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-delayed-with.nix new file mode 100644 index 000000000000..3fb023e1cd42 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-delayed-with.nix @@ -0,0 +1,29 @@ +let + + pkgs_ = with pkgs; { + a = derivation { + name = "a"; + system = builtins.currentSystem; + builder = "/bin/sh"; + args = [ "-c" "touch $out" ]; + inherit b; + }; + + b = derivation { + name = "b"; + system = builtins.currentSystem; + builder = "/bin/sh"; + args = [ "-c" "touch $out" ]; + inherit a; + }; + + c = b; + }; + + packageOverrides = pkgs: with pkgs; { + b = derivation (b.drvAttrs // { name = "${b.name}-overridden"; }); + }; + + pkgs = pkgs_ // (packageOverrides pkgs_); + +in "${pkgs.a.b.name} ${pkgs.c.name} ${pkgs.b.a.name}" diff --git a/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-eq-derivations.exp b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-eq-derivations.exp new file mode 100644 index 000000000000..ec04aab6aeec --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-eq-derivations.exp @@ -0,0 +1 @@ +[ true true true false ] diff --git a/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-eq-derivations.nix b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-eq-derivations.nix new file mode 100644 index 000000000000..d526cb4a2161 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-eq-derivations.nix @@ -0,0 +1,10 @@ +let + + drvA1 = derivation { name = "a"; builder = "/foo"; system = "i686-linux"; }; + drvA2 = derivation { name = "a"; 
builder = "/foo"; system = "i686-linux"; }; + drvA3 = derivation { name = "a"; builder = "/foo"; system = "i686-linux"; } // { dummy = 1; }; + + drvC1 = derivation { name = "c"; builder = "/foo"; system = "i686-linux"; }; + drvC2 = derivation { name = "c"; builder = "/bar"; system = "i686-linux"; }; + +in [ (drvA1 == drvA1) (drvA1 == drvA2) (drvA1 == drvA3) (drvC1 == drvC2) ] diff --git a/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-floor-ceil.exp b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-floor-ceil.exp new file mode 100644 index 000000000000..81f80420b996 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-floor-ceil.exp @@ -0,0 +1 @@ +"23;24;23;23" diff --git a/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-floor-ceil.nix b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-floor-ceil.nix new file mode 100644 index 000000000000..d76a0d86ea78 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-floor-ceil.nix @@ -0,0 +1,9 @@ +with import ./lib.nix; + +let + n1 = builtins.floor 23.5; + n2 = builtins.ceil 23.5; + n3 = builtins.floor 23; + n4 = builtins.ceil 23; +in + builtins.concatStringsSep ";" (map toString [ n1 n2 n3 n4 ]) diff --git a/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-functionargs.exp.xml b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-functionargs.exp.xml new file mode 100644 index 000000000000..651f54c36341 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-functionargs.exp.xml @@ -0,0 +1,15 @@ +<?xml version='1.0' encoding='utf-8'?> +<expr> + <list> + <string value="stdenv" /> + <string value="fetchurl" /> + <string value="aterm-stdenv" /> + <string value="aterm-stdenv2" /> + <string value="libX11" /> + <string value="libXv" /> + <string value="mplayer-stdenv2.libXv-libX11" /> + <string value="mplayer-stdenv2.libXv-libX11_2" /> + <string value="nix-stdenv-aterm-stdenv" /> + <string value="nix-stdenv2-aterm2-stdenv2" /> + </list> +</expr> diff --git a/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-functionargs.nix b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-functionargs.nix new file mode 100644 index 000000000000..68dca62ee18d --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-functionargs.nix @@ -0,0 +1,80 @@ +let + + stdenvFun = { }: { name = "stdenv"; }; + stdenv2Fun = { }: { name = "stdenv2"; }; + fetchurlFun = { stdenv }: assert stdenv.name == "stdenv"; { name = "fetchurl"; }; + atermFun = { stdenv, fetchurl }: { name = "aterm-${stdenv.name}"; }; + aterm2Fun = { stdenv, fetchurl }: { name = "aterm2-${stdenv.name}"; }; + nixFun = { stdenv, fetchurl, aterm }: { name = "nix-${stdenv.name}-${aterm.name}"; }; + + mplayerFun = + { stdenv, fetchurl, enableX11 ? false, xorg ? null, enableFoo ? true, foo ? null }: + assert stdenv.name == "stdenv2"; + assert enableX11 -> xorg.libXv.name == "libXv"; + assert enableFoo -> foo != null; + { name = "mplayer-${stdenv.name}.${xorg.libXv.name}-${xorg.libX11.name}"; }; + + makeOverridable = f: origArgs: f origArgs // + { override = newArgs: + makeOverridable f (origArgs // (if builtins.isFunction newArgs then newArgs origArgs else newArgs)); + }; + + callPackage_ = pkgs: f: args: + makeOverridable f ((builtins.intersectAttrs (builtins.functionArgs f) pkgs) // args); + + allPackages = + { overrides ? 
(pkgs: pkgsPrev: { }) }: + let + callPackage = callPackage_ pkgs; + pkgs = pkgsStd // (overrides pkgs pkgsStd); + pkgsStd = { + inherit pkgs; + stdenv = callPackage stdenvFun { }; + stdenv2 = callPackage stdenv2Fun { }; + fetchurl = callPackage fetchurlFun { }; + aterm = callPackage atermFun { }; + xorg = callPackage xorgFun { }; + mplayer = callPackage mplayerFun { stdenv = pkgs.stdenv2; enableFoo = false; }; + nix = callPackage nixFun { }; + }; + in pkgs; + + libX11Fun = { stdenv, fetchurl }: { name = "libX11"; }; + libX11_2Fun = { stdenv, fetchurl }: { name = "libX11_2"; }; + libXvFun = { stdenv, fetchurl, libX11 }: { name = "libXv"; }; + + xorgFun = + { pkgs }: + let callPackage = callPackage_ (pkgs // pkgs.xorg); in + { + libX11 = callPackage libX11Fun { }; + libXv = callPackage libXvFun { }; + }; + +in + +let + + pkgs = allPackages { }; + + pkgs2 = allPackages { + overrides = pkgs: pkgsPrev: { + stdenv = pkgs.stdenv2; + nix = pkgsPrev.nix.override { aterm = aterm2Fun { inherit (pkgs) stdenv fetchurl; }; }; + xorg = pkgsPrev.xorg // { libX11 = libX11_2Fun { inherit (pkgs) stdenv fetchurl; }; }; + }; + }; + +in + + [ pkgs.stdenv.name + pkgs.fetchurl.name + pkgs.aterm.name + pkgs2.aterm.name + pkgs.xorg.libX11.name + pkgs.xorg.libXv.name + pkgs.mplayer.name + pkgs2.mplayer.name + pkgs.nix.name + pkgs2.nix.name + ] diff --git a/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-getattrpos-functionargs.exp b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-getattrpos-functionargs.exp new file mode 100644 index 000000000000..7f9ac40e81b1 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-getattrpos-functionargs.exp @@ -0,0 +1 @@ +{ column = 11; file = "eval-okay-getattrpos-functionargs.nix"; line = 2; } diff --git a/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-getattrpos-functionargs.nix b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-getattrpos-functionargs.nix new file mode 100644 index 000000000000..11d6bb0e3ac9 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-getattrpos-functionargs.nix @@ -0,0 +1,4 @@ +let + fun = { foo }: {}; + pos = builtins.unsafeGetAttrPos "foo" (builtins.functionArgs fun); +in { inherit (pos) column line; file = baseNameOf pos.file; } diff --git a/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-getattrpos-undefined.exp b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-getattrpos-undefined.exp new file mode 100644 index 000000000000..19765bd501b6 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-getattrpos-undefined.exp @@ -0,0 +1 @@ +null diff --git a/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-getattrpos-undefined.nix b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-getattrpos-undefined.nix new file mode 100644 index 000000000000..14dd38f7734c --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-getattrpos-undefined.nix @@ -0,0 +1 @@ +builtins.unsafeGetAttrPos "abort" builtins diff --git a/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-getattrpos.exp b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-getattrpos.exp new file mode 100644 index 000000000000..469249bbc646 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-getattrpos.exp @@ -0,0 +1 @@ +{ column = 5; file = "eval-okay-getattrpos.nix"; line = 3; } diff --git a/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-getattrpos.nix b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-getattrpos.nix new file mode 100644 index 
000000000000..ca6b07961547 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-getattrpos.nix @@ -0,0 +1,6 @@ +let + as = { + foo = "bar"; + }; + pos = builtins.unsafeGetAttrPos "foo" as; +in { inherit (pos) column line; file = baseNameOf pos.file; } diff --git a/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-getenv.exp b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-getenv.exp new file mode 100644 index 000000000000..14e24d419005 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-getenv.exp @@ -0,0 +1 @@ +"foobar" diff --git a/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-getenv.nix b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-getenv.nix new file mode 100644 index 000000000000..4cfec5f553d9 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-getenv.nix @@ -0,0 +1 @@ +builtins.getEnv "TEST_VAR" + (if builtins.getEnv "NO_SUCH_VAR" == "" then "bar" else "bla") diff --git a/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-groupBy.exp b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-groupBy.exp new file mode 100644 index 000000000000..bfca5652a59b --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-groupBy.exp @@ -0,0 +1 @@ +{ "1" = [ 9 ]; "2" = [ 8 ]; "3" = [ 13 29 ]; "4" = [ 3 4 10 11 17 18 ]; "5" = [ 0 23 26 28 ]; "6" = [ 1 12 21 27 30 ]; "7" = [ 7 22 ]; "8" = [ 14 ]; "9" = [ 19 ]; b = [ 16 25 ]; c = [ 24 ]; d = [ 2 ]; e = [ 5 6 15 31 ]; f = [ 20 ]; } diff --git a/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-groupBy.nix b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-groupBy.nix new file mode 100644 index 000000000000..7e0eab28b036 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-groupBy.nix @@ -0,0 +1,5 @@ +with import ./../lib.nix; + +builtins.groupBy (n: + builtins.substring 0 1 (builtins.hashString "sha256" (toString n)) +) (range 0 31) diff --git a/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-hashfile.exp b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-hashfile.exp new file mode 100644 index 000000000000..ff1e8293ef22 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-hashfile.exp @@ -0,0 +1 @@ +[ "d3b07384d113edec49eaa6238ad5ff00" "0f343b0931126a20f133d67c2b018a3b" "f1d2d2f924e986ac86fdf7b36c94bcdf32beec15" "60cacbf3d72e1e7834203da608037b1bf83b40e8" "b5bb9d8014a0f9b1d61e21e796d78dccdf1352f23cd32812f4850b878ae4944c" "5f70bf18a086007016e948b04aed3b82103a36bea41755b6cddfaf10ace3c6ef" "0cf9180a764aba863a67b6d72f0918bc131c6772642cb2dce5a34f0a702f9470ddc2bf125c12198b1995c233c34b4afd346c54a2334c350a948a51b6e8b4e6b6" "8efb4f73c5655351c444eb109230c556d39e2c7624e9c11abc9e3fb4b9b9254218cc5085b454a9698d085cfa92198491f07a723be4574adc70617b73eb0b6461" ] diff --git a/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-hashfile.nix b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-hashfile.nix new file mode 100644 index 000000000000..8c9de66b7ecf --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-hashfile.nix @@ -0,0 +1,4 @@ +let + paths = [ ./../data ./../binary-data ]; +in + builtins.concatLists (map (hash: map (builtins.hashFile hash) paths) ["md5" "sha1" "sha256" "sha512"]) diff --git a/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-hashstring.exp b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-hashstring.exp new file mode 100644 index 000000000000..d720a082ddb3 --- /dev/null +++ 
b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-hashstring.exp @@ -0,0 +1 @@ +[ "d41d8cd98f00b204e9800998ecf8427e" "6c69ee7f211c640419d5366cc076ae46" "bb3438fbabd460ea6dbd27d153e2233b" "da39a3ee5e6b4b0d3255bfef95601890afd80709" "cd54e8568c1b37cf1e5badb0779bcbf382212189" "6d12e10b1d331dad210e47fd25d4f260802b7e77" "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855" "900a4469df00ccbfd0c145c6d1e4b7953dd0afafadd7534e3a4019e8d38fc663" "ad0387b3bd8652f730ca46d25f9c170af0fd589f42e7f23f5a9e6412d97d7e56" "cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e" "9d0886f8c6b389398a16257bc79780fab9831c7fc11c8ab07fa732cb7b348feade382f92617c9c5305fefba0af02ab5fd39a587d330997ff5bd0db19f7666653" "21644b72aa259e5a588cd3afbafb1d4310f4889680f6c83b9d531596a5a284f34dbebff409d23bcc86aee6bad10c891606f075c6f4755cb536da27db5693f3a7" ] diff --git a/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-hashstring.nix b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-hashstring.nix new file mode 100644 index 000000000000..b0f62b245ca8 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-hashstring.nix @@ -0,0 +1,4 @@ +let + strings = [ "" "text 1" "text 2" ]; +in + builtins.concatLists (map (hash: map (builtins.hashString hash) strings) ["md5" "sha1" "sha256" "sha512"]) diff --git a/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-import.exp b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-import.exp new file mode 100644 index 000000000000..c508125b55be --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-import.exp @@ -0,0 +1 @@ +[ 1 2 3 4 5 6 7 8 9 10 ] diff --git a/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-import.nix b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-import.nix new file mode 100644 index 000000000000..76213a9541e3 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-import.nix @@ -0,0 +1,11 @@ +let + + overrides = { + import = fn: scopedImport overrides fn; + + scopedImport = attrs: fn: scopedImport (overrides // attrs) fn; + + builtins = builtins // overrides; + } // import ./../lib.nix; + +in scopedImport overrides ./../imported.nix diff --git a/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-overrides.exp b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-overrides.exp new file mode 100644 index 000000000000..0cfbf08886fc --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-overrides.exp @@ -0,0 +1 @@ +2 diff --git a/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-overrides.nix b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-overrides.nix new file mode 100644 index 000000000000..358742b36e22 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-overrides.nix @@ -0,0 +1,9 @@ +let + + overrides = { a = 2; }; + +in (rec { + __overrides = overrides; + x = a; + a = 1; +}).x diff --git a/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-path.nix b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-path.nix new file mode 100644 index 000000000000..e67168cf3edf --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-path.nix @@ -0,0 +1,7 @@ +builtins.path + { path = ./.; + filter = path: _: baseNameOf path == "data"; + recursive = true; + sha256 = "1yhm3gwvg5a41yylymgblsclk95fs6jy72w0wv925mmidlhcq4sw"; + name = "output"; + } diff --git a/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-readDir.exp 
b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-readDir.exp new file mode 100644 index 000000000000..bf8d2c14ea4f --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-readDir.exp @@ -0,0 +1 @@ +{ bar = "regular"; foo = "directory"; } diff --git a/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-readDir.nix.disabled b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-readDir.nix.disabled new file mode 100644 index 000000000000..a7ec9292aae2 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-readDir.nix.disabled @@ -0,0 +1 @@ +builtins.readDir ./readDir diff --git a/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-search-path.exp b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-search-path.exp new file mode 100644 index 000000000000..4519bc406db5 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-search-path.exp @@ -0,0 +1 @@ +"abccX" diff --git a/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-search-path.flags b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-search-path.flags new file mode 100644 index 000000000000..a28e6821004a --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-search-path.flags @@ -0,0 +1 @@ +-I lang/dir1 -I lang/dir2 -I dir5=lang/dir3 \ No newline at end of file diff --git a/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-search-path.nix b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-search-path.nix new file mode 100644 index 000000000000..6fe33decc019 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-search-path.nix @@ -0,0 +1,10 @@ +with import ./lib.nix; +with builtins; + +assert isFunction (import <nix/fetchurl.nix>); + +assert length __nixPath == 5; +assert length (filter (x: baseNameOf x.path == "dir4") __nixPath) == 1; + +import <a.nix> + import <b.nix> + import <c.nix> + import <dir5/c.nix> + + (let __nixPath = [ { path = ./dir2; } { path = ./dir1; } ]; in import <a.nix>) diff --git a/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-xml.exp.xml b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-xml.exp.xml new file mode 100644 index 000000000000..20099326cc96 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-xml.exp.xml @@ -0,0 +1,52 @@ +<?xml version='1.0' encoding='utf-8'?> +<expr> + <attrs> + <attr name="a"> + <string value="foo" /> + </attr> + <attr name="at"> + <function> + <attrspat name="args"> + <attr name="x" /> + <attr name="y" /> + <attr name="z" /> + </attrspat> + </function> + </attr> + <attr name="b"> + <string value="bar" /> + </attr> + <attr name="c"> + <string value="foobar" /> + </attr> + <attr name="ellipsis"> + <function> + <attrspat ellipsis="1"> + <attr name="x" /> + <attr name="y" /> + <attr name="z" /> + </attrspat> + </function> + </attr> + <attr name="f"> + <function> + <attrspat> + <attr name="x" /> + <attr name="y" /> + <attr name="z" /> + </attrspat> + </function> + </attr> + <attr name="id"> + <function> + <varpat name="x" /> + </function> + </attr> + <attr name="x"> + <int value="123" /> + </attr> + <attr name="y"> + <float value="567.89" /> + </attr> + </attrs> +</expr> diff --git a/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-xml.nix b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-xml.nix new file mode 100644 index 000000000000..9ee9f8a0b4f5 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-xml.nix @@ -0,0 +1,21 @@ +rec { + + x = 123; + + y = 567.890; + + a = "foo"; + + b = "bar"; + + c = 
"foo" + "bar"; + + f = {z, x, y}: if y then x else z; + + id = x: x; + + at = args@{x, y, z}: x; + + ellipsis = {x, y, z, ...}: x; + +} diff --git a/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-zipAttrsWith.exp b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-zipAttrsWith.exp new file mode 100644 index 000000000000..9c0b15d22ba4 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-zipAttrsWith.exp @@ -0,0 +1 @@ +{ "0" = { n = "0"; v = [ 5 23 29 ]; }; "1" = { n = "1"; v = [ 7 30 ]; }; "2" = { n = "2"; v = [ 18 ]; }; "4" = { n = "4"; v = [ 10 ]; }; "5" = { n = "5"; v = [ 15 25 26 31 ]; }; "6" = { n = "6"; v = [ 3 14 ]; }; "7" = { n = "7"; v = [ 12 ]; }; "8" = { n = "8"; v = [ 2 6 8 9 ]; }; "9" = { n = "9"; v = [ 0 16 ]; }; a = { n = "a"; v = [ 17 21 22 27 ]; }; c = { n = "c"; v = [ 11 24 ]; }; d = { n = "d"; v = [ 4 13 28 ]; }; e = { n = "e"; v = [ 20 ]; }; f = { n = "f"; v = [ 1 19 ]; }; } diff --git a/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-zipAttrsWith.nix b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-zipAttrsWith.nix new file mode 100644 index 000000000000..e5d4cdccb74e --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/notyetpassing/eval-okay-zipAttrsWith.nix @@ -0,0 +1,9 @@ +with import ./../lib.nix; + +let + str = builtins.hashString "sha256" "test"; +in +builtins.zipAttrsWith + (n: v: { inherit n v; }) + (map (n: { ${builtins.substring n 1 str} = n; }) + (range 0 31)) diff --git a/tvix/eval/src/tests/nix_tests/notyetpassing/readDir/bar b/tvix/eval/src/tests/nix_tests/notyetpassing/readDir/bar new file mode 100644 index 000000000000..e69de29bb2d1 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/notyetpassing/readDir/bar diff --git a/tvix/eval/src/tests/nix_tests/notyetpassing/readDir/foo/git-hates-directories b/tvix/eval/src/tests/nix_tests/notyetpassing/readDir/foo/git-hates-directories new file mode 100644 index 000000000000..e69de29bb2d1 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/notyetpassing/readDir/foo/git-hates-directories diff --git a/tvix/eval/src/tests/nix_tests/parse-fail-dup-attrs-1.nix b/tvix/eval/src/tests/nix_tests/parse-fail-dup-attrs-1.nix new file mode 100644 index 000000000000..2c02317d2a19 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/parse-fail-dup-attrs-1.nix @@ -0,0 +1,4 @@ +{ x = 123; + y = 456; + x = 789; +} diff --git a/tvix/eval/src/tests/nix_tests/parse-fail-dup-attrs-2.nix b/tvix/eval/src/tests/nix_tests/parse-fail-dup-attrs-2.nix new file mode 100644 index 000000000000..864d9865e07d --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/parse-fail-dup-attrs-2.nix @@ -0,0 +1,13 @@ +let { + + as = { + x = 123; + y = 456; + }; + + bs = { + x = 789; + inherit (as) x; + }; + +} diff --git a/tvix/eval/src/tests/nix_tests/parse-fail-dup-attrs-3.nix b/tvix/eval/src/tests/nix_tests/parse-fail-dup-attrs-3.nix new file mode 100644 index 000000000000..114d19779f86 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/parse-fail-dup-attrs-3.nix @@ -0,0 +1,13 @@ +let { + + as = { + x = 123; + y = 456; + }; + + bs = rec { + x = 789; + inherit (as) x; + }; + +} diff --git a/tvix/eval/src/tests/nix_tests/parse-fail-dup-attrs-4.nix b/tvix/eval/src/tests/nix_tests/parse-fail-dup-attrs-4.nix new file mode 100644 index 000000000000..77417432b347 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/parse-fail-dup-attrs-4.nix @@ -0,0 +1,4 @@ +{ + services.ssh.port = 22; + services.ssh.port = 23; +} diff --git a/tvix/eval/src/tests/nix_tests/parse-fail-dup-attrs-7.nix 
b/tvix/eval/src/tests/nix_tests/parse-fail-dup-attrs-7.nix new file mode 100644 index 000000000000..bbc3eb08c0f6 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/parse-fail-dup-attrs-7.nix @@ -0,0 +1,9 @@ +rec { + + x = 1; + + as = { + inherit x; + inherit x; + }; +} \ No newline at end of file diff --git a/tvix/eval/src/tests/nix_tests/parse-fail-dup-formals.nix b/tvix/eval/src/tests/nix_tests/parse-fail-dup-formals.nix new file mode 100644 index 000000000000..a0edd91a9666 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/parse-fail-dup-formals.nix @@ -0,0 +1 @@ +{x, y, x}: x \ No newline at end of file diff --git a/tvix/eval/src/tests/nix_tests/parse-fail-eof-in-string.nix b/tvix/eval/src/tests/nix_tests/parse-fail-eof-in-string.nix new file mode 100644 index 000000000000..19775d2ec810 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/parse-fail-eof-in-string.nix @@ -0,0 +1,3 @@ +# https://github.com/NixOS/nix/issues/6562 +# Note that this file must not end with a newline. +a 1"$ \ No newline at end of file diff --git a/tvix/eval/src/tests/nix_tests/parse-fail-mixed-nested-attrs1.nix b/tvix/eval/src/tests/nix_tests/parse-fail-mixed-nested-attrs1.nix new file mode 100644 index 000000000000..11e40e66fd1b --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/parse-fail-mixed-nested-attrs1.nix @@ -0,0 +1,4 @@ +{ + x.z = 3; + x = { y = 3; z = 3; }; +} diff --git a/tvix/eval/src/tests/nix_tests/parse-fail-mixed-nested-attrs2.nix b/tvix/eval/src/tests/nix_tests/parse-fail-mixed-nested-attrs2.nix new file mode 100644 index 000000000000..17da82e5f0c7 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/parse-fail-mixed-nested-attrs2.nix @@ -0,0 +1,4 @@ +{ + x.y.y = 3; + x = { y.y= 3; z = 3; }; +} diff --git a/tvix/eval/src/tests/nix_tests/parse-fail-patterns-1.nix b/tvix/eval/src/tests/nix_tests/parse-fail-patterns-1.nix new file mode 100644 index 000000000000..7b40616417b8 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/parse-fail-patterns-1.nix @@ -0,0 +1 @@ +args@{args, x, y, z}: x diff --git a/tvix/eval/src/tests/nix_tests/parse-fail-regression-20060610.nix b/tvix/eval/src/tests/nix_tests/parse-fail-regression-20060610.nix new file mode 100644 index 000000000000..b1934f7e1e82 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/parse-fail-regression-20060610.nix @@ -0,0 +1,11 @@ +let { + x = + {gcc}: + { + inherit gcc; + }; + + body = ({ + inherit gcc; + }).gcc; +} diff --git a/tvix/eval/src/tests/nix_tests/parse-fail-uft8.nix b/tvix/eval/src/tests/nix_tests/parse-fail-uft8.nix new file mode 100644 index 000000000000..34948d48aed2 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/parse-fail-uft8.nix @@ -0,0 +1 @@ +123 é 4 diff --git a/tvix/eval/src/tests/nix_tests/parse-fail-undef-var-2.nix b/tvix/eval/src/tests/nix_tests/parse-fail-undef-var-2.nix new file mode 100644 index 000000000000..c10a52b1ea42 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/parse-fail-undef-var-2.nix @@ -0,0 +1,7 @@ +let { + + f = {x, y : ["baz" "bar" z "bat"]}: x + y; + + body = f {x = "foo"; y = "bar";}; + +} diff --git a/tvix/eval/src/tests/nix_tests/parse-fail-undef-var.nix b/tvix/eval/src/tests/nix_tests/parse-fail-undef-var.nix new file mode 100644 index 000000000000..7b63008110db --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/parse-fail-undef-var.nix @@ -0,0 +1 @@ +x: y diff --git a/tvix/eval/src/tests/nix_tests/parse-okay-1.nix b/tvix/eval/src/tests/nix_tests/parse-okay-1.nix new file mode 100644 index 000000000000..23a58ed109b1 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/parse-okay-1.nix @@ -0,0 +1 @@ 
+{x, y, z}: x + y + z diff --git a/tvix/eval/src/tests/nix_tests/parse-okay-crlf.nix b/tvix/eval/src/tests/nix_tests/parse-okay-crlf.nix new file mode 100644 index 000000000000..21518d4c6d80 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/parse-okay-crlf.nix @@ -0,0 +1,17 @@ +rec { + + /* Dit is + een test. */ + + x = + # Dit is een test. y; + + y = 123; + + # CR or CR/LF (but not explicit \r's) in strings should be + # translated to LF. + foo = "multi line + string + test\r"; + + z = 456; } diff --git a/tvix/eval/src/tests/nix_tests/parse-okay-dup-attrs-5.nix b/tvix/eval/src/tests/nix_tests/parse-okay-dup-attrs-5.nix new file mode 100644 index 000000000000..f4b9efd0c596 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/parse-okay-dup-attrs-5.nix @@ -0,0 +1,4 @@ +{ + services.ssh = { enable = true; }; + services.ssh.port = 23; +} diff --git a/tvix/eval/src/tests/nix_tests/parse-okay-dup-attrs-6.nix b/tvix/eval/src/tests/nix_tests/parse-okay-dup-attrs-6.nix new file mode 100644 index 000000000000..ae6d7a769305 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/parse-okay-dup-attrs-6.nix @@ -0,0 +1,4 @@ +{ + services.ssh.port = 23; + services.ssh = { enable = true; }; +} diff --git a/tvix/eval/src/tests/nix_tests/parse-okay-mixed-nested-attrs-1.nix b/tvix/eval/src/tests/nix_tests/parse-okay-mixed-nested-attrs-1.nix new file mode 100644 index 000000000000..fd1001c8cafc --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/parse-okay-mixed-nested-attrs-1.nix @@ -0,0 +1,4 @@ +{ + x = { y = 3; z = 3; }; + x.q = 3; +} diff --git a/tvix/eval/src/tests/nix_tests/parse-okay-mixed-nested-attrs-2.nix b/tvix/eval/src/tests/nix_tests/parse-okay-mixed-nested-attrs-2.nix new file mode 100644 index 000000000000..ad066b680384 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/parse-okay-mixed-nested-attrs-2.nix @@ -0,0 +1,4 @@ +{ + x.q = 3; + x = { y = 3; z = 3; }; +} diff --git a/tvix/eval/src/tests/nix_tests/parse-okay-mixed-nested-attrs-3.nix b/tvix/eval/src/tests/nix_tests/parse-okay-mixed-nested-attrs-3.nix new file mode 100644 index 000000000000..45a33e480373 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/parse-okay-mixed-nested-attrs-3.nix @@ -0,0 +1,7 @@ +{ + services.ssh.enable = true; + services.ssh = { port = 123; }; + services = { + httpd.enable = true; + }; +} diff --git a/tvix/eval/src/tests/nix_tests/parse-okay-regression-20041027.nix b/tvix/eval/src/tests/nix_tests/parse-okay-regression-20041027.nix new file mode 100644 index 000000000000..ae2e256eeaaa --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/parse-okay-regression-20041027.nix @@ -0,0 +1,11 @@ +{stdenv, fetchurl /* pkgconfig, libX11 */ }: + +stdenv.mkDerivation { + name = "libXi-6.0.1"; + src = fetchurl { + url = http://freedesktop.org/~xlibs/release/libXi-6.0.1.tar.bz2; + md5 = "7e935a42428d63a387b3c048be0f2756"; + }; +/* buildInputs = [pkgconfig]; + propagatedBuildInputs = [libX11]; */ +} diff --git a/tvix/eval/src/tests/nix_tests/parse-okay-regression-751.nix b/tvix/eval/src/tests/nix_tests/parse-okay-regression-751.nix new file mode 100644 index 000000000000..05c78b3016d3 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/parse-okay-regression-751.nix @@ -0,0 +1,2 @@ +let const = a: "const"; in +''${ const { x = "q"; }}'' diff --git a/tvix/eval/src/tests/nix_tests/parse-okay-subversion.nix b/tvix/eval/src/tests/nix_tests/parse-okay-subversion.nix new file mode 100644 index 000000000000..356272815d26 --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/parse-okay-subversion.nix @@ -0,0 +1,43 @@ +{ localServer ? false +, httpServer ? 
false +, sslSupport ? false +, pythonBindings ? false +, javaSwigBindings ? false +, javahlBindings ? false +, stdenv, fetchurl +, openssl ? null, httpd ? null, db4 ? null, expat, swig ? null, j2sdk ? null +}: + +assert expat != null; +assert localServer -> db4 != null; +assert httpServer -> httpd != null && httpd.expat == expat; +assert sslSupport -> openssl != null && (httpServer -> httpd.openssl == openssl); +assert pythonBindings -> swig != null && swig.pythonSupport; +assert javaSwigBindings -> swig != null && swig.javaSupport; +assert javahlBindings -> j2sdk != null; + +stdenv.mkDerivation { + name = "subversion-1.1.1"; + + builder = /foo/bar; + src = fetchurl { + url = http://subversion.tigris.org/tarballs/subversion-1.1.1.tar.bz2; + md5 = "a180c3fe91680389c210c99def54d9e0"; + }; + + # This is a hopefully temporary fix for the problem that + # libsvnjavahl.so isn't linked against libstdc++, which causes + # loading the library into the JVM to fail. + patches = if javahlBindings then [/javahl.patch] else []; + + openssl = if sslSupport then openssl else null; + httpd = if httpServer then httpd else null; + db4 = if localServer then db4 else null; + swig = if pythonBindings || javaSwigBindings then swig else null; + python = if pythonBindings then swig.python else null; + j2sdk = if javaSwigBindings then swig.j2sdk else + if javahlBindings then j2sdk else null; + + inherit expat localServer httpServer sslSupport + pythonBindings javaSwigBindings javahlBindings; +} diff --git a/tvix/eval/src/tests/nix_tests/parse-okay-url.nix b/tvix/eval/src/tests/nix_tests/parse-okay-url.nix new file mode 100644 index 000000000000..08de27d0a4ce --- /dev/null +++ b/tvix/eval/src/tests/nix_tests/parse-okay-url.nix @@ -0,0 +1,8 @@ +[ x:x + https://svn.cs.uu.nl:12443/repos/trace/trunk + http://www2.mplayerhq.hu/MPlayer/releases/fonts/font-arial-iso-8859-1.tar.bz2 + http://losser.st-lab.cs.uu.nl/~armijn/.nix/gcc-3.3.4-static-nix.tar.gz + http://fpdownload.macromedia.com/get/shockwave/flash/english/linux/7.0r25/install_flash_player_7_linux.tar.gz + https://ftp5.gwdg.de/pub/linux/archlinux/extra/os/x86_64/unzip-6.0-14-x86_64.pkg.tar.zst + ftp://ftp.gtk.org/pub/gtk/v1.2/gtk+-1.2.10.tar.gz +] diff --git a/tvix/eval/src/tests/one_offs.rs b/tvix/eval/src/tests/one_offs.rs new file mode 100644 index 000000000000..23bc9465d6bb --- /dev/null +++ b/tvix/eval/src/tests/one_offs.rs @@ -0,0 +1,36 @@ +use crate::*; + +#[test] +fn test_source_builtin() { + // Test an evaluation with a source-only builtin. The test ensures + // that the artificially constructed thunking is correct. + + let mut eval = Evaluation::new_impure("builtins.testSourceBuiltin", None); + eval.src_builtins.push(("testSourceBuiltin", "42")); + + let result = eval.evaluate(); + assert!( + result.errors.is_empty(), + "evaluation failed: {:?}", + result.errors + ); + + let value = result.value.unwrap(); + assert!( + matches!(value, Value::Integer(42)), + "expected the integer 42, but got {}", + value, + ); +} + +#[test] +fn skip_broken_bytecode() { + let result = Evaluation::new(/* code = */ "x", None).evaluate(); + + assert_eq!(result.errors.len(), 1); + + assert!(matches!( + result.errors[0].kind, + ErrorKind::UnknownStaticVariable + )); +} diff --git a/tvix/eval/src/tests/tvix_tests/README.md b/tvix/eval/src/tests/tvix_tests/README.md new file mode 100644 index 000000000000..b493aa81f12f --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/README.md @@ -0,0 +1,19 @@ +These tests are "native" to Tvix and exist in addition to the Nix test +suite. 
+ +All of these are straightforward code snippets which are expected to +produce a certain result. + +# `identity-*` tests + +Files named `identity-*.nix` contain code that is supposed to produce +itself exactly after evaluation. + +These are useful for testing literals. + +# `eval-okay-*` tests + +Files named `eval-okay-*.nix` contain code which is supposed to +evaluate to the output in the corresponding `eval-okay-*.exp` file. + +This convention is taken from the original Nix test suite. diff --git a/tvix/eval/src/tests/tvix_tests/directory/default.nix b/tvix/eval/src/tests/tvix_tests/directory/default.nix new file mode 100644 index 000000000000..d81cc0710eb6 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/directory/default.nix @@ -0,0 +1 @@ +42 diff --git a/tvix/eval/src/tests/tvix_tests/eval-fail-builtins-thunk-error.nix b/tvix/eval/src/tests/tvix_tests/eval-fail-builtins-thunk-error.nix new file mode 100644 index 000000000000..bb0d5920d757 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-fail-builtins-thunk-error.nix @@ -0,0 +1 @@ +builtins.genList (_: {}.foo) 1 diff --git a/tvix/eval/src/tests/tvix_tests/eval-fail-builtins-tojson-tostring-notcallable.nix b/tvix/eval/src/tests/tvix_tests/eval-fail-builtins-tojson-tostring-notcallable.nix new file mode 100644 index 000000000000..345b76fde037 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-fail-builtins-tojson-tostring-notcallable.nix @@ -0,0 +1,5 @@ +# attribute sets with a non-callable `__toString` can not be +# serialised to JSON. +builtins.toJSON { + __toString = 42; +} diff --git a/tvix/eval/src/tests/tvix_tests/eval-fail-builtins-tojson-tostring-strong.nix b/tvix/eval/src/tests/tvix_tests/eval-fail-builtins-tojson-tostring-strong.nix new file mode 100644 index 000000000000..d1c72dc6783a --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-fail-builtins-tojson-tostring-strong.nix @@ -0,0 +1,6 @@ +# String coercions when using builtins.toJSON on an attribute set with +# a `__toString` attribute should be weak. 
+builtins.toJSON { + __toString = self: self.x; + x = 42; +} diff --git a/tvix/eval/src/tests/tvix_tests/eval-fail-closed-formals.nix b/tvix/eval/src/tests/tvix_tests/eval-fail-closed-formals.nix new file mode 100644 index 000000000000..937604c563e9 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-fail-closed-formals.nix @@ -0,0 +1 @@ +({x}: x) {x = 1; y = 2;} diff --git a/tvix/eval/src/tests/tvix_tests/eval-fail-deep-forced-thunk-error.nix b/tvix/eval/src/tests/tvix_tests/eval-fail-deep-forced-thunk-error.nix new file mode 100644 index 000000000000..b7a758302266 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-fail-deep-forced-thunk-error.nix @@ -0,0 +1 @@ +[ (throw "error!") ] diff --git a/tvix/eval/src/tests/tvix_tests/eval-fail-deepseq.nix b/tvix/eval/src/tests/tvix_tests/eval-fail-deepseq.nix new file mode 100644 index 000000000000..9baa49b063ec --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-fail-deepseq.nix @@ -0,0 +1 @@ +builtins.deepSeq { x = abort "foo"; } 456 diff --git a/tvix/eval/src/tests/tvix_tests/eval-fail-foldlStrict-strict-op-application.nix b/tvix/eval/src/tests/tvix_tests/eval-fail-foldlStrict-strict-op-application.nix new file mode 100644 index 000000000000..adc029b2f29e --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-fail-foldlStrict-strict-op-application.nix @@ -0,0 +1,4 @@ +builtins.foldl' + (_: f: f null) + (throw "This doesn't explode") + [ (_: throw "Not the final value, but is still forced!") (_: 23) ] diff --git a/tvix/eval/src/tests/tvix_tests/eval-fail-force-before-value-pointer-equality.nix b/tvix/eval/src/tests/tvix_tests/eval-fail-force-before-value-pointer-equality.nix new file mode 100644 index 000000000000..de6c94325661 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-fail-force-before-value-pointer-equality.nix @@ -0,0 +1,5 @@ +let + x = throw "I have been forced"; +in + +x == x \ No newline at end of file diff --git a/tvix/eval/src/tests/tvix_tests/eval-fail-getEnv-coercion.nix b/tvix/eval/src/tests/tvix_tests/eval-fail-getEnv-coercion.nix new file mode 100644 index 000000000000..fe48a5690c46 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-fail-getEnv-coercion.nix @@ -0,0 +1 @@ +builtins.getEnv { var = "PATH"; __toString = self: self.var; } diff --git a/tvix/eval/src/tests/tvix_tests/eval-fail-infinite-recursion.nix b/tvix/eval/src/tests/tvix_tests/eval-fail-infinite-recursion.nix new file mode 100644 index 000000000000..5e4fd3789cd6 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-fail-infinite-recursion.nix @@ -0,0 +1 @@ +let x = x; in x diff --git a/tvix/eval/src/tests/tvix_tests/eval-fail-outer-value-never-pointer-equal.nix b/tvix/eval/src/tests/tvix_tests/eval-fail-outer-value-never-pointer-equal.nix new file mode 100644 index 000000000000..a8c3cedf61d7 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-fail-outer-value-never-pointer-equal.nix @@ -0,0 +1,7 @@ +# For an explanation of this behavior see //tvix/docs/value-pointer-equality.md +let + x = { foo = throw "foo"; }; +in + +# while `builtins.seq x null` would succeed, this fails! 
+x == x diff --git a/tvix/eval/src/tests/tvix_tests/eval-fail-parsedrvname-coerce.nix b/tvix/eval/src/tests/tvix_tests/eval-fail-parsedrvname-coerce.nix new file mode 100644 index 000000000000..a1218de3fe58 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-fail-parsedrvname-coerce.nix @@ -0,0 +1 @@ +builtins.parseDrvName { outPath = "lol"; } diff --git a/tvix/eval/src/tests/tvix_tests/eval-fail-remove.nix b/tvix/eval/src/tests/tvix_tests/eval-fail-remove.nix new file mode 100644 index 000000000000..539e0eb0a6f6 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-fail-remove.nix @@ -0,0 +1,5 @@ +let { + attrs = {x = 123; y = 456;}; + + body = (removeAttrs attrs ["x"]).x; +} \ No newline at end of file diff --git a/tvix/eval/src/tests/tvix_tests/eval-fail-seq.nix b/tvix/eval/src/tests/tvix_tests/eval-fail-seq.nix new file mode 100644 index 000000000000..cddbbfd3261e --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-fail-seq.nix @@ -0,0 +1 @@ +builtins.seq (abort "foo") 2 diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-access-strange-identifier.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-access-strange-identifier.exp new file mode 100644 index 000000000000..d81cc0710eb6 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-access-strange-identifier.exp @@ -0,0 +1 @@ +42 diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-access-strange-identifier.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-access-strange-identifier.nix new file mode 100644 index 000000000000..8e282d1280f1 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-access-strange-identifier.nix @@ -0,0 +1,8 @@ +let + # There is no syntax for accessing this identifier in an ordinary + # way. + "foo bar" = 42; +in ({ + # but we *can* inherit it back out + inherit "foo bar"; +})."foo bar" diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-add-paths.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-add-paths.exp new file mode 100644 index 000000000000..94ba9a881ae6 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-add-paths.exp @@ -0,0 +1 @@ +[ /bin /binbar /binbar /binbar /binbar /bin/bar /bin/bin ] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-add-paths.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-add-paths.nix new file mode 100644 index 000000000000..462f670882a0 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-add-paths.nix @@ -0,0 +1,9 @@ +[ + (/bin + "/") + (/bin + "bar") + (let name = "bar"; in /bin + name) + (let name = "bar"; in /bin + "${name}") + (let name = "bar"; in /bin + "/" + "${name}") + (let name = "bar"; in /bin + "/${name}") + (/bin + /bin) +] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-arithmetic-float.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-arithmetic-float.exp new file mode 100644 index 000000000000..08ef6079f899 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-arithmetic-float.exp @@ -0,0 +1 @@ +{ add = 37.34; div = 1.05714; mul = 105.154; sub = 14.35; } diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-arithmetic-float.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-arithmetic-float.nix new file mode 100644 index 000000000000..9d12aee061b2 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-arithmetic-float.nix @@ -0,0 +1,6 @@ +{ + add = 12.34 + 25.0; + sub = 20.05 - 5.7; + mul = 28.42 * 3.70; + div = 18.5 / 17.5; +} diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-arithmetic-int.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-arithmetic-int.exp new file mode 100644 index 
000000000000..a5711e8bfe9b --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-arithmetic-int.exp @@ -0,0 +1 @@ +{ add = 20; div = 3; mul = 8; sub = 15; } diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-arithmetic-int.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-arithmetic-int.nix new file mode 100644 index 000000000000..c53790db099b --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-arithmetic-int.nix @@ -0,0 +1,6 @@ +{ + add = 15 + 5; + sub = 20 - 5; + mul = 4 * 2; + div = 9 / 3; +} diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-assert-thunk-condition.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-assert-thunk-condition.exp new file mode 100644 index 000000000000..aabe6ec3909c --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-assert-thunk-condition.exp @@ -0,0 +1 @@ +21 diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-assert-thunk-condition.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-assert-thunk-condition.nix new file mode 100644 index 000000000000..8934b3d8cf02 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-assert-thunk-condition.nix @@ -0,0 +1,7 @@ +let + condition = x: y: x < y; +in + +# The function application here will become a thunk which verifies that +# assert forces the condition expression correctly. +assert condition 21 42; 21 diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-attrs-inherit-literal.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-attrs-inherit-literal.exp new file mode 100644 index 000000000000..60d3b2f4a4cd --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-attrs-inherit-literal.exp @@ -0,0 +1 @@ +15 diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-attrs-inherit-literal.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-attrs-inherit-literal.nix new file mode 100644 index 000000000000..587aec893372 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-attrs-inherit-literal.nix @@ -0,0 +1,2 @@ +# the 'from' part of an `inherit` can be any expression. 
+{ inherit ({a = 15;}) a; }.a diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-attrs-simple-inherit.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-attrs-simple-inherit.exp new file mode 100644 index 000000000000..a779fce51abc --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-attrs-simple-inherit.exp @@ -0,0 +1 @@ +{ a = 1; } diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-attrs-simple-inherit.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-attrs-simple-inherit.nix new file mode 100644 index 000000000000..68880bcfd857 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-attrs-simple-inherit.nix @@ -0,0 +1,4 @@ +let + a = 1; +in +{ inherit a; } diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-attrs-update-empty-lhs.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-attrs-update-empty-lhs.exp new file mode 100644 index 000000000000..fedf8f25a693 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-attrs-update-empty-lhs.exp @@ -0,0 +1 @@ +{ a = "ok"; } diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-attrs-update-empty-lhs.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-attrs-update-empty-lhs.nix new file mode 100644 index 000000000000..9596be22b831 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-attrs-update-empty-lhs.nix @@ -0,0 +1 @@ +{} // { a = "ok"; } diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-attrs-update-empty-rhs.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-attrs-update-empty-rhs.exp new file mode 100644 index 000000000000..fedf8f25a693 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-attrs-update-empty-rhs.exp @@ -0,0 +1 @@ +{ a = "ok"; } diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-attrs-update-empty-rhs.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-attrs-update-empty-rhs.nix new file mode 100644 index 000000000000..117c01141357 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-attrs-update-empty-rhs.nix @@ -0,0 +1 @@ +{ a = "ok"; } // {} diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-attrs-update-kv-lhs.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-attrs-update-kv-lhs.exp new file mode 100644 index 000000000000..c2234a47e2b8 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-attrs-update-kv-lhs.exp @@ -0,0 +1 @@ +{ name = "foo"; other = 42; value = "bar"; } diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-attrs-update-kv-lhs.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-attrs-update-kv-lhs.nix new file mode 100644 index 000000000000..6f71684902e5 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-attrs-update-kv-lhs.nix @@ -0,0 +1 @@ +{ name = "foo"; value = "bar"; } // { other = 42; } diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-attrs-update.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-attrs-update.exp new file mode 100644 index 000000000000..57f4d541bd85 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-attrs-update.exp @@ -0,0 +1 @@ +{ a = 15; b = "works"; } diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-attrs-update.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-attrs-update.nix new file mode 100644 index 000000000000..735602fe02d5 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-attrs-update.nix @@ -0,0 +1 @@ +{ a = 15; } // { b = "works"; } diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-basenameof.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-basenameof.exp new file mode 100644 index 000000000000..60a773f4af69 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-basenameof.exp @@ -0,0 
+1 @@ +[ "bar" "foo" "" "bar" "." "" "" "" ] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-basenameof.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-basenameof.nix new file mode 100644 index 000000000000..bc59613f541d --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-basenameof.nix @@ -0,0 +1,10 @@ +[ + (builtins.baseNameOf /foo/bar) + (builtins.baseNameOf "foo") + (builtins.baseNameOf "foo///") + (builtins.baseNameOf "foo/bar") + (builtins.baseNameOf "./.") + (builtins.baseNameOf "") + (builtins.baseNameOf /.) + (builtins.toString (builtins.baseNameOf /.)) +] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-add.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-add.exp new file mode 100644 index 000000000000..c3ac813de6b9 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-add.exp @@ -0,0 +1 @@ +[ 18 18.9 18.9 19.1 19 42 ] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-add.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-add.nix new file mode 100644 index 000000000000..b04b1d1fa6ba --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-add.nix @@ -0,0 +1,8 @@ +[ + (builtins.add 7 11) + (builtins.add 7.9 11) + (builtins.add 7 11.9) + (builtins.add 7.2 11.9) + (builtins.add 7.1 11.9) + (builtins.add (builtins.add 21 10) 11) +] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-all.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-all.exp new file mode 100644 index 000000000000..82ca7e6b6db7 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-all.exp @@ -0,0 +1 @@ +[ true true false false false false true true false ] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-all.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-all.nix new file mode 100644 index 000000000000..12d62632dd6c --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-all.nix @@ -0,0 +1,15 @@ +[ + (builtins.all (x: x) [ ]) + (builtins.all (x: x) [ true true true ]) + (builtins.all (x: x) [ false false false ]) + (builtins.all (x: x) [ true true false ]) + (builtins.all (x: x) [ false true true ]) + + # evaluation should short-circuit + (builtins.all (x: x) [ true false (builtins.abort "should be unreachable") ]) + + # arbitrary functions supported + (builtins.all (x: x * 2 == 42) [ ]) + (builtins.all (x: x * 2 == 42) [ 21 21 21 ]) + (builtins.all (x: x * 2 == 42) [ 1 2 3 ]) +] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-any.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-any.exp new file mode 100644 index 000000000000..d6846ac3f756 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-any.exp @@ -0,0 +1 @@ +[ false true false true true true false true false ] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-any.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-any.nix new file mode 100644 index 000000000000..2c659f130b65 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-any.nix @@ -0,0 +1,15 @@ +[ + (builtins.any (x: x) [ ]) + (builtins.any (x: x) [ true true true ]) + (builtins.any (x: x) [ false false false ]) + (builtins.any (x: x) [ true true false ]) + (builtins.any (x: x) [ false true true ]) + + # evaluation should short-circuit + (builtins.any (x: x) [ false true (builtins.abort "should be unreachable") ]) + + # arbitrary functions supported + (builtins.any (x: x * 2 == 42) [ ]) + (builtins.any (x: x * 2 == 42) [ 7 21 42 ]) + (builtins.any (x: x * 2 == 42) [ 1 2 3 ]) +] diff 
--git a/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-attrnames.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-attrnames.exp new file mode 100644 index 000000000000..6521066a8ea5 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-attrnames.exp @@ -0,0 +1 @@ +[ [ ] [ "bar" "baz" "foo" ] [ "Baz" "Foo" "bar" ] [ "Eric Idle" "Graham Chapman" "John Cleese" "Michael Palin" "Terry Gilliam" "Terry Jones" ] ] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-attrnames.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-attrnames.nix new file mode 100644 index 000000000000..67f7dcee5672 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-attrnames.nix @@ -0,0 +1,13 @@ +[ + (builtins.attrNames {}) + (builtins.attrNames { foo = 1; bar = 2; baz = 3; }) + (builtins.attrNames { Foo = 1; bar = 2; Baz = 3; }) + (builtins.attrNames { + "Graham Chapman" = true; + "John Cleese" = true; + "Terry Gilliam" = true; + "Eric Idle" = true; + "Terry Jones" = true; + "Michael Palin" = true; + }) +] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-attrvalues.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-attrvalues.exp new file mode 100644 index 000000000000..35c369772038 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-attrvalues.exp @@ -0,0 +1 @@ +[ [ ] [ 2 3 1 ] ] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-attrvalues.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-attrvalues.nix new file mode 100644 index 000000000000..5eced31f0d2d --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-attrvalues.nix @@ -0,0 +1,4 @@ +[ + (builtins.attrValues {}) + (builtins.attrValues { foo = 1; bar = 2; baz = 3; }) +] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-bitand.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-bitand.exp new file mode 100644 index 000000000000..30b348853e44 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-bitand.exp @@ -0,0 +1 @@ +[ 0 0 0 1 8 8 8 8 ] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-bitand.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-bitand.nix new file mode 100644 index 000000000000..af40005ed983 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-bitand.nix @@ -0,0 +1,10 @@ +[ + (builtins.bitAnd 0 0) + (builtins.bitAnd 0 1) + (builtins.bitAnd 1 0) + (builtins.bitAnd 1 1) + (builtins.bitAnd 8 8) + (builtins.bitAnd 8 (builtins.add 4 4)) + (builtins.bitAnd (builtins.add 4 4) 8) + (builtins.bitAnd (builtins.add 4 4) (builtins.add 4 4)) +] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-bitor.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-bitor.exp new file mode 100644 index 000000000000..2556b4183c77 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-bitor.exp @@ -0,0 +1 @@ +[ 0 1 1 1 8 8 8 8 ] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-bitor.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-bitor.nix new file mode 100644 index 000000000000..9c28f6d7acee --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-bitor.nix @@ -0,0 +1,10 @@ +[ + (builtins.bitOr 0 0) + (builtins.bitOr 1 0) + (builtins.bitOr 0 1) + (builtins.bitOr 1 1) + (builtins.bitOr 8 8) + (builtins.bitOr 8 (builtins.add 4 4)) + (builtins.bitOr (builtins.add 4 4) 8) + (builtins.bitOr (builtins.add 4 4) (builtins.add 4 4)) +] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-bitxor.exp 
b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-bitxor.exp new file mode 100644 index 000000000000..457157d45970 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-bitxor.exp @@ -0,0 +1 @@ +[ 0 1 1 0 8 8 0 0 0 0 ] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-bitxor.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-bitxor.nix new file mode 100644 index 000000000000..80e363fb0748 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-bitxor.nix @@ -0,0 +1,12 @@ +[ + (builtins.bitXor 0 0) + (builtins.bitXor 1 0) + (builtins.bitXor 0 1) + (builtins.bitXor 1 1) + (builtins.bitXor 8 0) + (builtins.bitXor 0 8) + (builtins.bitXor 8 8) + (builtins.bitXor 8 (builtins.add 4 4)) + (builtins.bitXor (builtins.add 4 4) 8) + (builtins.bitXor (builtins.add 4 4) (builtins.add 4 4)) +] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-builtins.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-builtins.exp new file mode 100644 index 000000000000..27ba77ddaf61 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-builtins.exp @@ -0,0 +1 @@ +true diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-builtins.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-builtins.nix new file mode 100644 index 000000000000..cfbcbbb7684d --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-builtins.nix @@ -0,0 +1 @@ +[ builtins ] == [ builtins.builtins ] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-catAttrs.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-catAttrs.exp new file mode 100644 index 000000000000..f8c0b2de5fba --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-catAttrs.exp @@ -0,0 +1 @@ +[ 21 "+" 21 "=" 42 ] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-catAttrs.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-catAttrs.nix new file mode 100644 index 000000000000..edac76d4461a --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-catAttrs.nix @@ -0,0 +1,10 @@ +builtins.catAttrs "foo" [ + { foo = 21; } + { bar = 23; foo = "+"; } + { } + { bar = 12; } + { foo = 21 + 0; } + { foo = "="; } + ({ bar = 13; } // { baz = 89; }) + { foo = 42; bar = 33; } +] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-compareVersions.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-compareVersions.exp new file mode 100644 index 000000000000..e69498c3e1c3 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-compareVersions.exp @@ -0,0 +1 @@ +[ 0 -1 -1 0 0 0 1 1 -1 1 -1 1 -1 -1 -1 -1 0 1 -1 -1 1 -1 -1 0 1 1 1 1 -1 -1 -1 -1 -1 ] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-compareVersions.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-compareVersions.nix new file mode 100644 index 000000000000..cce554b4494a --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-compareVersions.nix @@ -0,0 +1,46 @@ +let + cmp = a: b: + let + ord1 = builtins.compareVersions a b; + ord2 = builtins.compareVersions b a; + in + assert ord1 == -ord2; ord1; +in + +[ + (cmp "1.2.3" "1.2.3") + (cmp "1.2.2" "1.2.3") + (cmp "1.2.3" "1.2.40") + (cmp "1.2.3" ".1.2.3") + (cmp "1.2.3" "1..2.3") + (cmp "1.2.3" "1.2.3.") + (cmp "1.2.3" "1.2") + (cmp "1.2.3" "1.2.a") + (cmp "1a.b" "1a.2") + (cmp "1" "") + (cmp "1.0" "1.0.0") + (cmp "2.3" "2.3pre") + (cmp "2.3" "2.3.0pre") + (cmp "2.3pre" "2.3.0pre") + (cmp "2.3" "2.3prepre") + (cmp "2.3pre" "2.3prepre") + (cmp "2.3prepre" "2.3prepre") + # 
check that the plain word comparison (via Ord) behaves the same + (cmp "foo" "bar") + (cmp "FoO" "fOo") + (cmp "foo" "fooo") + (cmp "foopre" "foo") + # Subset of test cases from eval-okay-versions.nix shipped by C++ Nix + (cmp "1.0" "2.3") + (cmp "2.1" "2.3") + (cmp "2.3" "2.3") + (cmp "2.5" "2.3") + (cmp "3.1" "2.3") + (cmp "2.3.1" "2.3") + (cmp "2.3.1" "2.3a") + (cmp "2.3pre1" "2.3") + (cmp "2.3pre3" "2.3pre12") + (cmp "2.3a" "2.3c") + (cmp "2.3pre1" "2.3c") + (cmp "2.3pre1" "2.3q") +] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-concat-lists.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-concat-lists.exp new file mode 100644 index 000000000000..64ae529ac257 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-concat-lists.exp @@ -0,0 +1 @@ +[ [ ] [ 1 2 3 4 5 6 ] [ [ 1 ] [ 2 ] [ 3 ] ] [ 1 2 3 4 5 6 ] ] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-concat-lists.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-concat-lists.nix new file mode 100644 index 000000000000..19ef5eba1145 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-concat-lists.nix @@ -0,0 +1,6 @@ +[ + (builtins.concatLists [ ]) + (builtins.concatLists [ [ 1 2 ] [ 3 4 ] [ 5 6 ] ]) + (builtins.concatLists [ [ [ 1 ] [ 2 ] ] [ [ 3 ] ] [ ] ]) + (builtins.concatLists [ [ 1 2 ] [ ] [ 3 4 ] [ ] [ 5 6 ] ]) +] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-div.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-div.exp new file mode 100644 index 000000000000..73e9bc33b083 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-div.exp @@ -0,0 +1 @@ +[ 3 7 0 1 0 0.5 0.5 0.5 42 ] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-div.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-div.nix new file mode 100644 index 000000000000..98b8b74bdf2b --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-div.nix @@ -0,0 +1,11 @@ +[ + (builtins.div 9 3) + (builtins.div 7 1) + (builtins.div 3 9) + (builtins.div 4 4) + (builtins.div 1 2) + (builtins.div 1.0 2) + (builtins.div 1 2.0) + (builtins.div 1.0 2.0) + (builtins.div (builtins.div 84 4) 0.5) +] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-elemat.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-elemat.exp new file mode 100644 index 000000000000..3701c9d75f94 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-elemat.exp @@ -0,0 +1 @@ +[ "foo" "bar" "baz" ] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-elemat.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-elemat.nix new file mode 100644 index 000000000000..762adeebbf0a --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-elemat.nix @@ -0,0 +1,5 @@ +[ + (builtins.elemAt [ "foo" "bar" "baz" ] 0) + (builtins.elemAt [ "foo" "bar" "baz" ] 1) + (builtins.elemAt [ "foo" "bar" "baz" ] 2) +] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-filter.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-filter.exp new file mode 100644 index 000000000000..fb94ebaa492c --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-filter.exp @@ -0,0 +1 @@ +[ [ 1 2 3 4 5 ] [ ] [ 2 2 2 ] [ [ 1 2 ] [ 3 4 ] ] ] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-filter.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-filter.nix new file mode 100644 index 000000000000..b621fdb43e7c --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-filter.nix @@ -0,0 +1,13 @@ +[ + 
(builtins.filter (_: true) [ 1 2 3 4 5 ]) + (builtins.filter (_: false) [ 1 2 3 4 5 ]) + (builtins.filter (x: x == 2) [ 1 2 1 2 1 2 ]) + + (builtins.filter (x: (builtins.length x) > 0) [ + [ ] + [ 1 2 ] + [ ] + [ ] + [ 3 4 ] + ]) +] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-getattr.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-getattr.exp new file mode 100644 index 000000000000..89fa6c681064 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-getattr.exp @@ -0,0 +1 @@ +[ 1 2 3 { bar = { baz = 3; }; } ] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-getattr.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-getattr.nix new file mode 100644 index 000000000000..87a2adbcd3dd --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-getattr.nix @@ -0,0 +1,6 @@ +[ + (builtins.getAttr "foo" { foo = 1; bar = 2; baz = 3; }) + (builtins.getAttr "bar" { foo = 1; bar = 2; baz = 3; }) + (builtins.getAttr "baz" { foo = 1; bar = 2; baz = 3; }) + (builtins.getAttr "foo" { foo = { bar = { baz = 3; }; }; }) +] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-groupby-thunk.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-groupby-thunk.exp new file mode 100644 index 000000000000..94649819caab --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-groupby-thunk.exp @@ -0,0 +1 @@ +{ fred = [ { x = "fred"; y = "fred"; } ]; } diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-groupby-thunk.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-groupby-thunk.nix new file mode 100644 index 000000000000..d62ae628dc71 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-groupby-thunk.nix @@ -0,0 +1,6 @@ + +builtins.groupBy + (v: v.x) + [ (rec { y = x; x = "fred"; }) ] + + diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-hasattr.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-hasattr.exp new file mode 100644 index 000000000000..541fe347cbcd --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-hasattr.exp @@ -0,0 +1 @@ +[ true true true false false true false ] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-hasattr.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-hasattr.nix new file mode 100644 index 000000000000..e87e186b641d --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-hasattr.nix @@ -0,0 +1,9 @@ +[ + (builtins.hasAttr "foo" { foo = 1; bar = 2; baz = 3; }) + (builtins.hasAttr "bar" { foo = 1; bar = 2; baz = 3; }) + (builtins.hasAttr "baz" { foo = 1; bar = 2; baz = 3; }) + (builtins.hasAttr "FOO" { foo = 1; bar = 2; baz = 3; }) + (builtins.hasAttr "foo" {}) + (builtins.hasAttr ("f" + "o" + "o") { foo = 1; }) + (builtins.hasAttr ("b" + "a" + "r") { foo = 1; }) +] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-head.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-head.exp new file mode 100644 index 000000000000..afe288459f2e --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-head.exp @@ -0,0 +1 @@ +[ "foo" 1 ] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-head.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-head.nix new file mode 100644 index 000000000000..1741a7aac4bb --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-head.nix @@ -0,0 +1,4 @@ +[ + (builtins.head [ "foo" ]) + (builtins.head [ 1 2 3 ]) +] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-length.exp 
b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-length.exp new file mode 100644 index 000000000000..e80eb6ef1465 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-length.exp @@ -0,0 +1 @@ +[ 0 1 3 ] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-length.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-length.nix new file mode 100644 index 000000000000..ef1f638cbbdc --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-length.nix @@ -0,0 +1,5 @@ +[ + (builtins.length []) + (builtins.length [ 1 ]) + (builtins.length [ "one" "two" "three" ]) +] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-lessThan.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-lessThan.exp new file mode 100644 index 000000000000..31f4598bb504 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-lessThan.exp @@ -0,0 +1 @@ +[ true true true true false false false false true true true true false ] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-lessThan.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-lessThan.nix new file mode 100644 index 000000000000..cd2d0c209c3a --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-lessThan.nix @@ -0,0 +1,15 @@ +[ + (builtins.lessThan 2 3) + (builtins.lessThan 2.0 3) + (builtins.lessThan 2 3.0) + (builtins.lessThan 2.0 3.0) + (builtins.lessThan 3 2) + (builtins.lessThan 3.0 2) + (builtins.lessThan 3 2.0) + (builtins.lessThan 3.0 2.0) + (builtins.lessThan 10 (builtins.add 9 2)) + (builtins.lessThan (builtins.add 9 1) 11) + (builtins.lessThan (builtins.add 9 1) (builtins.add 9 2)) + (builtins.lessThan "a" "b") + (builtins.lessThan "b" "a") +] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-map.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-map.exp new file mode 100644 index 000000000000..6cf53040320f --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-map.exp @@ -0,0 +1 @@ +[ [ 1 2 3 4 5 ] [ 2 4 6 8 10 ] [ 2 4 6 8 10 ] [ 2 4 6 8 10 ] [ 1 2 3 4 5 ] ] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-map.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-map.nix new file mode 100644 index 000000000000..71b351fd55b0 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-map.nix @@ -0,0 +1,19 @@ +[ + # identity function + (builtins.map (x: x) [ 1 2 3 4 5 ]) + + # double stuff + (builtins.map (x: x * 2) [ 1 2 3 4 5 ]) + + # same but with a closure this time + ( + let n = 2; + in builtins.map (x: x * n) [ 1 2 3 4 5 ] + ) + + # same, but with a builtin + (builtins.map (builtins.mul 2) [ 1 2 3 4 5 ]) + + # from global scope + (map (x: x) [ 1 2 3 4 5 ]) +] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-mul.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-mul.exp new file mode 100644 index 000000000000..e3e0f03a8af5 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-mul.exp @@ -0,0 +1 @@ +[ 36 0 0 14 42 ] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-mul.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-mul.nix new file mode 100644 index 000000000000..2a8d6c4214c3 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-mul.nix @@ -0,0 +1,7 @@ +[ + (builtins.mul 4 9) + (builtins.mul 0 7) + (builtins.mul 7 0) + (builtins.mul 7 2) + (builtins.mul (builtins.mul 4 0.5) 21) +] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-partition.exp 
b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-partition.exp new file mode 100644 index 000000000000..d2390db4f528 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-partition.exp @@ -0,0 +1 @@ +[ { right = [ 1 2 3 4 5 ]; wrong = [ ]; } { right = [ ]; wrong = [ 1 2 3 4 5 ]; } { right = [ 2 ]; wrong = [ 1 3 4 5 ]; } { right = [ [ 1 2 ] [ 3 4 ] ]; wrong = [ [ 1 ] [ 2 ] [ 3 ] ]; } ] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-partition.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-partition.nix new file mode 100644 index 000000000000..0587330ff917 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-partition.nix @@ -0,0 +1,13 @@ +[ + (builtins.partition (_: true) [ 1 2 3 4 5]) + (builtins.partition (_: false) [ 1 2 3 4 5]) + (builtins.partition (x: x == 2) [ 1 2 3 4 5]) + + (builtins.partition (x: (builtins.length x) > 1) [ + [ 1 ] + [ 1 2 ] + [ 2 ] + [ 3 ] + [ 3 4 ] + ]) +] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-replaceStrings.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-replaceStrings.exp new file mode 100644 index 000000000000..c2cb89bac663 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-replaceStrings.exp @@ -0,0 +1 @@ +[ "fabir" "a" "1a1" ] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-replaceStrings.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-replaceStrings.nix new file mode 100644 index 000000000000..b8101c448bb0 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-replaceStrings.nix @@ -0,0 +1,5 @@ +[ + (builtins.replaceStrings ["oo" "a"] ["a" "i"] "foobar") + (builtins.replaceStrings ["o"] ["a"] "a") + (builtins.replaceStrings ["" ""] ["1" "2"] "a") +] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-splitVersion.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-splitVersion.exp new file mode 100644 index 000000000000..222a0093f570 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-splitVersion.exp @@ -0,0 +1 @@ +[ [ "1" "2" "3" ] [ "2" "3" "16" ] [ "22" "11" "pre" "408963" "823" "e" "2" "c" "9" "b" "0" "a" "0" ] [ "9" "4" "1" "rc" "1" ] [ "9" "4" "0" "20220721" ] [ "0" "1" "alpha" ] [ "unstable" "2022" "09" "20" ] [ "30" "pre" "9" ] [ "0" "pre+date=" "2021" "11" "30" ] [ "1" "2" "0" "_pre" "23" ] [ "0" "1" "0" "pre" "71" "_" "170" "f" "840" ] ] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-splitVersion.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-splitVersion.nix new file mode 100644 index 000000000000..4083e86714fc --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-splitVersion.nix @@ -0,0 +1,13 @@ +[ + (builtins.splitVersion "1.2.3") + (builtins.splitVersion "2.3.16") + (builtins.splitVersion "22.11pre408963.823e2c9b0a0") + (builtins.splitVersion "9.4.1-rc1") + (builtins.splitVersion "9.4.0.20220721") + (builtins.splitVersion "0.1-alpha") + (builtins.splitVersion "unstable-2022-09-20") + (builtins.splitVersion "30.pre9") + (builtins.splitVersion "0.pre+date=2021-11-30") + (builtins.splitVersion "1.2.0_pre23") + (builtins.splitVersion "0.1.0pre71_170f840") +] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-string-length.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-string-length.exp new file mode 100644 index 000000000000..b019be4bfd7a --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-string-length.exp @@ -0,0 +1 @@ +[ 3 "hello" 9 4 ] diff --git 
a/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-string-length.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-string-length.nix new file mode 100644 index 000000000000..b7d51db3c578 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-string-length.nix @@ -0,0 +1,10 @@ +[ + (builtins.stringLength "foo") + (let s = "hello"; in (builtins.substring 0 (builtins.stringLength s) s)) + (builtins.stringLength ("foo" + "${"bar" + "baz"}")) + + # feel free to delete this test case at any time, it's just to show: This is a + # thing at the moment. We may want to break compatibility with this aspect of + # the C++ Nix implementation at any time. + (builtins.stringLength "😀") +] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-sub.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-sub.exp new file mode 100644 index 000000000000..51842eccfac5 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-sub.exp @@ -0,0 +1 @@ +[ -4 -3.1 -4.9 -4.7 -4 42 ] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-sub.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-sub.nix new file mode 100644 index 000000000000..2929c4dddd81 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-sub.nix @@ -0,0 +1,8 @@ +[ + (builtins.sub 7 11) + (builtins.sub 7.9 11) + (builtins.sub 7 11.9) + (builtins.sub 7.2 11.9) + (builtins.sub 7.9 11.9) + (builtins.sub (builtins.sub 123 23) 58) +] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-substring-coerce.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-substring-coerce.exp new file mode 100644 index 000000000000..192548e94978 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-substring-coerce.exp @@ -0,0 +1 @@ +"42" diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-substring-coerce.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-substring-coerce.nix new file mode 100644 index 000000000000..626ae1d1be55 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-substring-coerce.nix @@ -0,0 +1,5 @@ +# builtins.substring uses string coercion internally + +builtins.substring 0 2 { + __toString = _: "4200"; +} diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-substring.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-substring.exp new file mode 100644 index 000000000000..168276022898 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-substring.exp @@ -0,0 +1 @@ +[ "tes" "testing" "" "estin" "ting" "" "" "" "" "est" "est" "est" "est" "est" "est" "" ] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-substring.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-substring.nix new file mode 100644 index 000000000000..f4ee82e2736f --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-substring.nix @@ -0,0 +1,18 @@ +[ + (builtins.substring 0 3 "testing") + (builtins.substring 0 300 "testing") + (builtins.substring 3 0 "testing") + (builtins.substring 1 5 "testing") + (builtins.substring 3 5 "testing") + (builtins.substring 300 300 "testing") + (builtins.substring 301 300 "testing") + (builtins.substring 0 0 "") + (builtins.substring 0 1 "") + (builtins.substring (builtins.add 0 1) 3 "testing") + (builtins.substring 1 (builtins.add 3 0) "testing") + (builtins.substring (builtins.add 0 1) (builtins.add 3 0) "testing") + (builtins.substring (builtins.add 0 1) (builtins.add 3 0) "testing") + (builtins.substring (builtins.add 0 1) (builtins.add 3 0) ("test" + "ing")) + 
(builtins.substring (builtins.add 0 1) (builtins.add 3 0) ("test" + "ing")) + (builtins.substring 300 (-10) "testing") +] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-tail.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-tail.exp new file mode 100644 index 000000000000..b9e3aa1ef79d --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-tail.exp @@ -0,0 +1 @@ +[ [ ] [ 2 3 ] ] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-tail.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-tail.nix new file mode 100644 index 000000000000..2be9496a98e2 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-tail.nix @@ -0,0 +1,4 @@ +[ + (builtins.tail [ "foo" ]) + (builtins.tail [ 1 2 3 ]) +] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-thunked-function-calls.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-thunked-function-calls.exp new file mode 100644 index 000000000000..3d4204d5a83e --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-thunked-function-calls.exp @@ -0,0 +1 @@ +[ 2 [ "Hans" "James" "Joachim" ] 2 [ "Clawdia" "Mynheer" ] 981 3 2 2 ] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-thunked-function-calls.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-thunked-function-calls.nix new file mode 100644 index 000000000000..d96ddb3bd16d --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-thunked-function-calls.nix @@ -0,0 +1,31 @@ +[ + # This is independent of builtins + (builtins.length [ (builtins.throw "Ferge") (builtins.throw "Wehsal") ]) + (builtins.attrNames { + Hans = throw "Castorp"; + Joachim = throw "Ziemßen"; + James = "Tienappel"; + }) + + (builtins.length (builtins.map builtins.throw [ "Settembrini" "Naphta" ])) + + (builtins.attrNames (builtins.mapAttrs builtins.throw { + Clawdia = "Chauchat"; + Mynheer = "Peeperkorn"; + })) + + (builtins.length (builtins.genList (builtins.add "Marusja") 981)) + (builtins.length (builtins.genList builtins.throw 3)) + + # These are hard to get wrong since the outer layer needs to be forced anyways + (builtins.length (builtins.genericClosure { + startSet = [ + { key = 1; initial = true; } + ]; + operator = { key, initial, ... 
}: + if initial + then [ { key = key - 1; initial = false; value = throw "lol"; } ] + else [ ]; + })) + (builtins.length (builtins.concatMap (m: [ m (builtins.throw m) ]) [ "Marusja" ])) +] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-toString.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-toString.exp new file mode 100644 index 000000000000..a148ebc3b53f --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-toString.exp @@ -0,0 +1 @@ +[ "1" "4.200000" "" "" "1" "foo" "/etc" "Hello World" "Hello World" "1" "out" "2" "1 4.200000 1 foo /etc Hello World Hello World 1 out 2" ] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-toString.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-toString.nix new file mode 100644 index 000000000000..e4dc18ac96a7 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-toString.nix @@ -0,0 +1,23 @@ +let + toStringableSet = { + __toString = self: self.content; + content = "Hello World"; + }; + + toStringExamples = [ + (toString 1) + (toString 4.2) + (toString null) + (toString false) + (toString true) + (toString "foo") + (toString /etc) + (toString toStringableSet) + (toString { __toString = _: toStringableSet; }) + (toString { __toString = _: true; }) + (toString { outPath = "out"; }) + (toString { outPath = { outPath = { __toString = _: 2; }; }; }) + ]; +in + +toStringExamples ++ [ (toString toStringExamples) ] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-tojson-literals.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-tojson-literals.exp new file mode 100644 index 000000000000..0a274c201fa8 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-tojson-literals.exp @@ -0,0 +1 @@ +"[42,\"hello\",13.37,[],[1,2,3],{},{\"name\":\"foo\",\"value\":42},{\"foo\":42}]" diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-tojson-literals.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-tojson-literals.nix new file mode 100644 index 000000000000..12e8c03b171d --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-tojson-literals.nix @@ -0,0 +1,11 @@ +# tests serialisation of literal data +builtins.toJSON [ + 42 + "hello" + 13.37 + [ ] + [ 1 2 3 ] + { } + { name = "foo"; value = 42; } + { foo = 42; } +] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-tojson-outpath-nested.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-tojson-outpath-nested.exp new file mode 100644 index 000000000000..69667de5a1c3 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-tojson-outpath-nested.exp @@ -0,0 +1 @@ +"{\"a\":40,\"b\":2}" diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-tojson-outpath-nested.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-tojson-outpath-nested.nix new file mode 100644 index 000000000000..70755c8c6dbe --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-tojson-outpath-nested.nix @@ -0,0 +1,8 @@ +# Attribute sets with an `outPath` can contain _any_ serialisable +# value in that field. 
+builtins.toJSON { + outPath = { + a = 40; + b = 2; + }; +} diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-tojson-outpath.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-tojson-outpath.exp new file mode 100644 index 000000000000..82dd08179843 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-tojson-outpath.exp @@ -0,0 +1 @@ +"\"/nix/store/jzka5ndnygkkfjfvpqwjipqp75lhz138-emacs-28.2\"" diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-tojson-outpath.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-tojson-outpath.nix new file mode 100644 index 000000000000..7f9d95ac60f4 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-tojson-outpath.nix @@ -0,0 +1,5 @@ +# Attribute sets with an `outPath` have that outPath itself serialised +# to string. +builtins.toJSON { + outPath = "/nix/store/jzka5ndnygkkfjfvpqwjipqp75lhz138-emacs-28.2"; +} diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-tojson-thunks.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-tojson-thunks.exp new file mode 100644 index 000000000000..9ccd94224ba8 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-tojson-thunks.exp @@ -0,0 +1 @@ +"[42,42,\"42\"]" diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-tojson-thunks.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-tojson-thunks.nix new file mode 100644 index 000000000000..16234ab4514a --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-tojson-thunks.nix @@ -0,0 +1,9 @@ +let + a = b * 2; + b = 21; +in +builtins.toJSON [ + a + ((n: n * 2) 21) + (builtins.toJSON a) +] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-tojson-tostring.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-tojson-tostring.exp new file mode 100644 index 000000000000..2661fd257bf9 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-tojson-tostring.exp @@ -0,0 +1 @@ +"\"it's 42\"" diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-tojson-tostring.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-tojson-tostring.nix new file mode 100644 index 000000000000..ec6f8d947cec --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-tojson-tostring.nix @@ -0,0 +1,8 @@ +# Attribute sets with a `__toString` attribute JSON-serialise with a +# string coercion of the function call result. 
+ +builtins.toJSON { + __toString = self: "it's " + (builtins.toString (self.x * self.y)); + x = 21; + y = 2; +} diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-type-of.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-type-of.exp new file mode 100644 index 000000000000..1ea054fc2d72 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-type-of.exp @@ -0,0 +1 @@ +[ "null" "bool" "bool" "int" "int" "float" "string" "string" "set" "set" "list" "lambda" "path" ] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-type-of.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-type-of.nix new file mode 100644 index 000000000000..a3cb659ecfde --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-type-of.nix @@ -0,0 +1,22 @@ +let + fix = f: let x = f x; in x; +in + +fix (self: + [ + (builtins.typeOf null) + (builtins.typeOf true) + (builtins.typeOf (true && false)) + (builtins.typeOf 12) + (builtins.typeOf (builtins.add 21 21)) + (builtins.typeOf 1.2) + (builtins.typeOf "foo") + (builtins.typeOf "${"foo" + "bar"}baz") + (builtins.typeOf {}) + # (builtins.typeOf { foo.bar = 32; }.foo) # TODO: re-enable when nested keys are done + (builtins.typeOf ({ name = "foo"; value = 13; } // { name = "bar"; })) + (builtins.typeOf self) + (builtins.typeOf fix) + (builtins.typeOf /nix/store) + ] +) diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-type-predicates.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-type-predicates.exp new file mode 100644 index 000000000000..724c1f9c34b9 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-type-predicates.exp @@ -0,0 +1 @@ +[ true true false true true false true true false true true false true true false true true false true true false true true false true true true false ] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-type-predicates.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-type-predicates.nix new file mode 100644 index 000000000000..3d688cb8bd68 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-type-predicates.nix @@ -0,0 +1,34 @@ +let + # apply is thunked, so we can create a thunked value using the identity function + thunk = x: x; +in +[ + (builtins.isAttrs { bar = throw "baz"; }) + (builtins.isAttrs (thunk { foo = 13; })) + (builtins.isAttrs (thunk 123)) + (builtins.isBool true) + (builtins.isBool (thunk false)) + (builtins.isBool (thunk "lol")) + (builtins.isFloat 1.2) + (builtins.isFloat (thunk (1 * 1.0))) + (builtins.isFloat 1) + (builtins.isFunction thunk) + (builtins.isFunction (thunk thunk)) + (builtins.isFunction {}) + (builtins.isInt 1) + (builtins.isInt (thunk 42)) + (builtins.isInt 1.0) + (builtins.isList [ (throw "oh no") (abort "it's over") ]) + (builtins.isList (thunk [ 21 21 ])) + (builtins.isList (thunk {})) + (builtins.isNull null) + (builtins.isNull (thunk null)) + (builtins.isNull 42) + (builtins.isPath ./relative) + (builtins.isPath (thunk /absolute)) + (builtins.isPath "/not/a/path") + (builtins.isString "simple") + (builtins.isString "${{ outPath = "coerced"; }}") + (builtins.isString "hello ${"interpolation"}") + (builtins.isString true) +] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-ceil.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-ceil.exp new file mode 100644 index 000000000000..dffbbe59f0a0 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-ceil.exp @@ -0,0 +1 @@ +[ 4 ] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-ceil.nix 
b/tvix/eval/src/tests/tvix_tests/eval-okay-ceil.nix new file mode 100644 index 000000000000..5835bf829b03 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-ceil.nix @@ -0,0 +1 @@ +[ (builtins.ceil 3.4) ] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-closure-pointer-compare.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-closure-pointer-compare.exp new file mode 100644 index 000000000000..c3bb809c9ffb --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-closure-pointer-compare.exp @@ -0,0 +1 @@ +[ false false ] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-closure-pointer-compare.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-closure-pointer-compare.nix new file mode 100644 index 000000000000..7c4333668df8 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-closure-pointer-compare.nix @@ -0,0 +1,14 @@ +# For an explanation of this behavior see //tvix/docs/value-pointer-equality.md +let + g = x: + owo: "th" + x; +in +[ + ( + { q = g "ia"; } == { q = g ("i"+"a"); } + ) + + ( + [ (g "ia") ] == [ (g ("i"+"a")) ] + ) +] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-closure-self.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-closure-self.exp new file mode 100644 index 000000000000..be54b4b4e39e --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-closure-self.exp @@ -0,0 +1 @@ +"done" diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-closure-self.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-closure-self.nix new file mode 100644 index 000000000000..bda364f42992 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-closure-self.nix @@ -0,0 +1,4 @@ +let + # self-recursive function should be able to close over itself + f = n: if n <= 0 then "done" else f (n - 1); +in f 10 diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-closure-with-shadowing.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-closure-with-shadowing.exp new file mode 100644 index 000000000000..d00491fd7e5b --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-closure-with-shadowing.exp @@ -0,0 +1 @@ +1 diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-closure-with-shadowing.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-closure-with-shadowing.nix new file mode 100644 index 000000000000..305463775217 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-closure-with-shadowing.nix @@ -0,0 +1,14 @@ +# If a closure closes over a variable that is statically known *and* +# available dynamically through `with`, the statically known one must +# have precedence. 
+ +let + # introduce statically known `a` (this should be the result) + a = 1; +in + +# introduce some closure depth to force both kinds of upvalue +# resolution, and introduce a dynamically known `a` within the +# closures +let f = b: with { a = 2; }; c: a + b + c; +in f 0 0 diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-cmp-float-false.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-cmp-float-false.exp new file mode 100644 index 000000000000..95a0e7378b5e --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-cmp-float-false.exp @@ -0,0 +1 @@ +{ eq = false; ge = false; gt = false; le = false; lt = false; ne = false; } diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-cmp-float-false.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-cmp-float-false.nix new file mode 100644 index 000000000000..2b511f56eecb --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-cmp-float-false.nix @@ -0,0 +1,8 @@ +{ + eq = 6.9 == 4.2; + ne = 4.2 != 4.2; + lt = 2.5 < 1.5; + le = 2.5 <= 1.5; + gt = 1.5 > 2.5; + ge = 1.5 >= 2.5; +} diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-cmp-float-true.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-cmp-float-true.exp new file mode 100644 index 000000000000..9160829dde78 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-cmp-float-true.exp @@ -0,0 +1 @@ +{ eq = true; ge = true; gt = true; le = true; lt = true; ne = true; } diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-cmp-float-true.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-cmp-float-true.nix new file mode 100644 index 000000000000..c505a85b1f3c --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-cmp-float-true.nix @@ -0,0 +1,8 @@ +{ + eq = 4.2 == 4.2; + ne = 6.9 != 4.2; + lt = 1.5 < 2.5; + le = 2.5 <= 2.5; + gt = 2.3 > 1.2; + ge = 2.3 >= 2.3; +} diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-cmp-int-false.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-cmp-int-false.exp new file mode 100644 index 000000000000..95a0e7378b5e --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-cmp-int-false.exp @@ -0,0 +1 @@ +{ eq = false; ge = false; gt = false; le = false; lt = false; ne = false; } diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-cmp-int-false.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-cmp-int-false.nix new file mode 100644 index 000000000000..7d6b30419fac --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-cmp-int-false.nix @@ -0,0 +1,8 @@ +{ + eq = 69 == 42; + ne = 42 != 42; + lt = 2 < 1; + le = 2 <= 1; + gt = 1 > 2; + ge = 1 >= 2; +} diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-cmp-int-true.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-cmp-int-true.exp new file mode 100644 index 000000000000..9160829dde78 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-cmp-int-true.exp @@ -0,0 +1 @@ +{ eq = true; ge = true; gt = true; le = true; lt = true; ne = true; } diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-cmp-int-true.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-cmp-int-true.nix new file mode 100644 index 000000000000..0bf474e53fd0 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-cmp-int-true.nix @@ -0,0 +1,8 @@ +{ + eq = 42 == 42; + ne = 69 != 42; + lt = 1 < 2; + le = 2 <= 2; + gt = 2 > 1; + ge = 2 >= 2; +} diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-cmp-num-false.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-cmp-num-false.exp new file mode 100644 index 000000000000..95a0e7378b5e --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-cmp-num-false.exp @@ -0,0 +1 @@ +{ eq = 
false; ge = false; gt = false; le = false; lt = false; ne = false; } diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-cmp-num-false.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-cmp-num-false.nix new file mode 100644 index 000000000000..61b206c03376 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-cmp-num-false.nix @@ -0,0 +1,8 @@ +{ + eq = 6.9 == 4; + ne = 4.0 != 4; + lt = 2.5 < 1; + le = 2 <= 1.5; + gt = 1 > 1.1; + ge = 1.5 >= 2; +} diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-cmp-num-true.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-cmp-num-true.exp new file mode 100644 index 000000000000..9160829dde78 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-cmp-num-true.exp @@ -0,0 +1 @@ +{ eq = true; ge = true; gt = true; le = true; lt = true; ne = true; } diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-cmp-num-true.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-cmp-num-true.nix new file mode 100644 index 000000000000..ad77074710ca --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-cmp-num-true.nix @@ -0,0 +1,8 @@ +{ + eq = 42.0 == 42; + ne = 6.9 != 4; + lt = 1.5 < 2; + le = 2.0 <= 2.0; + gt = 1.1 > 1; + ge = 2.3 >= 2.3; +} diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-cmp-str-false.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-cmp-str-false.exp new file mode 100644 index 000000000000..95a0e7378b5e --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-cmp-str-false.exp @@ -0,0 +1 @@ +{ eq = false; ge = false; gt = false; le = false; lt = false; ne = false; } diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-cmp-str-false.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-cmp-str-false.nix new file mode 100644 index 000000000000..b5773a21d374 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-cmp-str-false.nix @@ -0,0 +1,8 @@ +{ + eq = "test" == "not test"; + ne = "test" != "test"; + lt = "bcd" < "abc"; + le = "bcd" <= "abc"; + gt = "abc" > "bcd"; + ge = "abc" >= "bcd"; +} diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-cmp-str-true.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-cmp-str-true.exp new file mode 100644 index 000000000000..9160829dde78 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-cmp-str-true.exp @@ -0,0 +1 @@ +{ eq = true; ge = true; gt = true; le = true; lt = true; ne = true; } diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-cmp-str-true.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-cmp-str-true.nix new file mode 100644 index 000000000000..172d2237e9e2 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-cmp-str-true.nix @@ -0,0 +1,8 @@ +{ + eq = "test" == "test"; + ne = "test" != "not test"; + lt = "abc" < "bcd"; + le = "bcd" <= "bcd"; + gt = "bcd" > "abc"; + ge = "bcd" >= "bcd"; +} diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-coerce-opadd.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-coerce-opadd.exp new file mode 100644 index 000000000000..d874518a37ed --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-coerce-opadd.exp @@ -0,0 +1 @@ +[ "lordnikon" "zerocool" /tmp/31337h4x0r "fooblah" "blahfoo" ] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-coerce-opadd.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-coerce-opadd.nix new file mode 100644 index 000000000000..6ddd30e7a189 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-coerce-opadd.nix @@ -0,0 +1,7 @@ +[ + ({ __toString = _: "lord"; } + "nikon") + ("zero" + { __toString = _: "cool"; }) + (/tmp/31337 + "h4x0r") + ("foo" + { outPath="blah"; }) + ({ outPath="blah"; } + 
"foo") +] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-compare-lists.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-compare-lists.exp new file mode 100644 index 000000000000..3b7fd398198a --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-compare-lists.exp @@ -0,0 +1 @@ +[ false true true true false true false false false true false false false true true ] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-compare-lists.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-compare-lists.nix new file mode 100644 index 000000000000..9b73df61d84e --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-compare-lists.nix @@ -0,0 +1,17 @@ +[ + ([1 2] < [1]) + ([1 2] < [2 3]) + ([1 2] < [2]) + ([1 2] < [1 2 3]) + ([3 4] < [1]) + ([1 2] > [1]) + ([1 2] > [2 3]) + ([1 2] > [2]) + ([1 2] > [1 2 3]) + ([3 4] > [1]) + ([1 2] <= [1]) + ([1 2] >= [2 3]) + ([1 2] >= [2]) + ([1 2] <= [1 2 3]) + ([3 4] >= [1]) +] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-concat-lists.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-concat-lists.exp new file mode 100644 index 000000000000..3bed31f76e3f --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-concat-lists.exp @@ -0,0 +1 @@ +[ 1 2 3 4 ] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-concat-lists.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-concat-lists.nix new file mode 100644 index 000000000000..de332cd29f3a --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-concat-lists.nix @@ -0,0 +1 @@ +[ 1 2 ] ++ [ 3 4 ] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-concat-strings.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-concat-strings.exp new file mode 100644 index 000000000000..cd4bc1ab64cc --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-concat-strings.exp @@ -0,0 +1 @@ +"hello world" diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-concat-strings.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-concat-strings.nix new file mode 100644 index 000000000000..1fc708929989 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-concat-strings.nix @@ -0,0 +1 @@ +"hello " + "world" diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-concatmap.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-concatmap.exp new file mode 100644 index 000000000000..14d804aa22f5 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-concatmap.exp @@ -0,0 +1 @@ +[ "a" "z" "b" "z" ] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-concatmap.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-concatmap.nix new file mode 100644 index 000000000000..149a0722b11d --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-concatmap.nix @@ -0,0 +1 @@ +(builtins.concatMap (x: [x] ++ ["z"]) ["a" "b"]) diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-concatstringssep.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-concatstringssep.exp new file mode 100644 index 000000000000..93987647ffe6 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-concatstringssep.exp @@ -0,0 +1 @@ +[ "" "foobarxyzzy" "foo, bar, xyzzy" "foo" "" ] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-concatstringssep.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-concatstringssep.nix new file mode 100644 index 000000000000..adc4c41bd551 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-concatstringssep.nix @@ -0,0 +1,8 @@ +with builtins; + +[ (concatStringsSep "" []) + (concatStringsSep "" ["foo" "bar" "xyzzy"]) + (concatStringsSep ", " ["foo" "bar" "xyzzy"]) + (concatStringsSep ", " ["foo"]) + (concatStringsSep ", " 
[]) +] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-contains-nested-non-set.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-contains-nested-non-set.exp new file mode 100644 index 000000000000..c508d5366f70 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-contains-nested-non-set.exp @@ -0,0 +1 @@ +false diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-contains-nested-non-set.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-contains-nested-non-set.nix new file mode 100644 index 000000000000..361ba9144594 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-contains-nested-non-set.nix @@ -0,0 +1,3 @@ +# ? operator should work even if encountering a non-set value on the +# walk +{ a.b = 42; } ? a.b.c diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-contains-non-set.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-contains-non-set.exp new file mode 100644 index 000000000000..ca00e3c049d6 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-contains-non-set.exp @@ -0,0 +1 @@ +[ false false false false ] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-contains-non-set.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-contains-non-set.nix new file mode 100644 index 000000000000..c086759f456b --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-contains-non-set.nix @@ -0,0 +1,3 @@ +# Nix allows using the ? operator on non-set types, in which case it +# should always return false. +[ (123 ? key) ("foo" ? key) (null ? key) ([ "key" ] ? key) ] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-deeply-nested-attrs.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-deeply-nested-attrs.exp new file mode 100644 index 000000000000..7cf54d959611 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-deeply-nested-attrs.exp @@ -0,0 +1 @@ +{ a = { b = { c = { d = { e = { f = { g = "deep!"; }; }; }; }; }; }; } diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-deeply-nested-attrs.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-deeply-nested-attrs.nix new file mode 100644 index 000000000000..91649d0c6dd4 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-deeply-nested-attrs.nix @@ -0,0 +1 @@ +{ a.b.c.d.e.f.g = "deep!"; } diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-deeply-nested-with-closure.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-deeply-nested-with-closure.exp new file mode 100644 index 000000000000..3bed31f76e3f --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-deeply-nested-with-closure.exp @@ -0,0 +1 @@ +[ 1 2 3 4 ] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-deeply-nested-with-closure.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-deeply-nested-with-closure.nix new file mode 100644 index 000000000000..7f13f1f27030 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-deeply-nested-with-closure.nix @@ -0,0 +1,18 @@ +# This convoluted test constructs a situation in which dynamically +# resolved upvalues refer `with` blocks introduced at different lambda +# context boundaries, i.e. the access to a, b in the innermost closure +# must be threaded through upvalues in several levels. 
+ +(_: +with { a = 1; b = 1; }; + +_: +with { b = 2; c = 2; }; + +_: +with { c = 3; d = 3; }; + +_: +with { d = 4; }; + +[ a b c d ]) null null null null diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-deeply-nested-with.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-deeply-nested-with.exp new file mode 100644 index 000000000000..3bed31f76e3f --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-deeply-nested-with.exp @@ -0,0 +1 @@ +[ 1 2 3 4 ] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-deeply-nested-with.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-deeply-nested-with.nix new file mode 100644 index 000000000000..7f1128b6707b --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-deeply-nested-with.nix @@ -0,0 +1,6 @@ +with { a = 1; b = 1; }; +with { b = 2; c = 2; }; +with { c = 3; d = 3; }; +with { d = 4; }; + +[ a b c d ] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-deepseq.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-deepseq.exp new file mode 100644 index 000000000000..8d38505c1686 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-deepseq.exp @@ -0,0 +1 @@ +456 diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-deepseq.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-deepseq.nix new file mode 100644 index 000000000000..53aa4b1dc251 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-deepseq.nix @@ -0,0 +1 @@ +builtins.deepSeq (let as = { x = 123; y = as; }; in as) 456 diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-deferred-unary-formals.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-deferred-unary-formals.exp new file mode 100644 index 000000000000..5993db7ccc5a --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-deferred-unary-formals.exp @@ -0,0 +1 @@ +[ false -2 ] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-deferred-unary-formals.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-deferred-unary-formals.nix new file mode 100644 index 000000000000..1fbb3e853af4 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-deferred-unary-formals.nix @@ -0,0 +1,6 @@ +# Application of unary operators on deferred formals arguments (via +# defaulting), see also b/255. +[ + (({ b ? !a, a }: b) { a = true; }) + (({ b ? -a, a }: b) { a = 2; }) +] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-deferred-with.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-deferred-with.exp new file mode 100644 index 000000000000..d81cc0710eb6 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-deferred-with.exp @@ -0,0 +1 @@ +42 diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-deferred-with.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-deferred-with.nix new file mode 100644 index 000000000000..af227ae28e87 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-deferred-with.nix @@ -0,0 +1,8 @@ +# Tests using `with` on a set that does not yet exist on the stack. + +let + result = with set; value; + set = { + value = 42; + }; +in result diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-dirof.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-dirof.exp new file mode 100644 index 000000000000..ff464e4c30d4 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-dirof.exp @@ -0,0 +1 @@ +[ /foo "." "foo//" "foo" "." "." 
/ "/" ] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-dirof.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-dirof.nix new file mode 100644 index 000000000000..13cf47320515 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-dirof.nix @@ -0,0 +1,10 @@ +[ + (builtins.dirOf /foo/bar) + (builtins.dirOf "foo") + (builtins.dirOf "foo///") + (builtins.dirOf "foo/bar") + (builtins.dirOf "./.") + (builtins.dirOf "") + (builtins.dirOf /.) + (builtins.toString (builtins.dirOf /.)) +] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-elem.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-elem.exp new file mode 100644 index 000000000000..3cf6c0e962f0 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-elem.exp @@ -0,0 +1 @@ +[ true false 30 ] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-elem.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-elem.nix new file mode 100644 index 000000000000..71ea7a4ed03d --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-elem.nix @@ -0,0 +1,6 @@ +with import ./lib.nix; + +let xs = range 10 40; in + +[ (builtins.elem 23 xs) (builtins.elem 42 xs) (builtins.elemAt xs 20) ] + diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-empty-rec-inherit.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-empty-rec-inherit.exp new file mode 100644 index 000000000000..ffcd4415b08f --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-empty-rec-inherit.exp @@ -0,0 +1 @@ +{ } diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-empty-rec-inherit.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-empty-rec-inherit.nix new file mode 100644 index 000000000000..a1181431deca --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-empty-rec-inherit.nix @@ -0,0 +1 @@ +rec { inherit; } diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-eq-float.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-eq-float.exp new file mode 100644 index 000000000000..27ba77ddaf61 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-eq-float.exp @@ -0,0 +1 @@ +true diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-eq-float.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-eq-float.nix new file mode 100644 index 000000000000..398f4a9dfc55 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-eq-float.nix @@ -0,0 +1 @@ +4.2 == 4.2 diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-eq-int.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-eq-int.exp new file mode 100644 index 000000000000..27ba77ddaf61 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-eq-int.exp @@ -0,0 +1 @@ +true diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-eq-int.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-eq-int.nix new file mode 100644 index 000000000000..dc52ba112a60 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-eq-int.nix @@ -0,0 +1 @@ +42 == 42 diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-eq-nested-list.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-eq-nested-list.exp new file mode 100644 index 000000000000..27ba77ddaf61 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-eq-nested-list.exp @@ -0,0 +1 @@ +true diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-eq-nested-list.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-eq-nested-list.nix new file mode 100644 index 000000000000..5dbcb515295b --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-eq-nested-list.nix @@ -0,0 +1 @@ +[["f" ""]] == [["f" ""]] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-escapify-integer-keys.exp 
b/tvix/eval/src/tests/tvix_tests/eval-okay-escapify-integer-keys.exp new file mode 100644 index 000000000000..aa98a082a8ac --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-escapify-integer-keys.exp @@ -0,0 +1 @@ +{ "3" = 3; } diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-escapify-integer-keys.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-escapify-integer-keys.nix new file mode 100644 index 000000000000..aa98a082a8ac --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-escapify-integer-keys.nix @@ -0,0 +1 @@ +{ "3" = 3; } diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-fib.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-fib.exp new file mode 100644 index 000000000000..8643cf6debac --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-fib.exp @@ -0,0 +1 @@ +89 diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-fib.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-fib.nix new file mode 100644 index 000000000000..9a22d85ac5f1 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-fib.nix @@ -0,0 +1,7 @@ +let + fib' = i: n: m: if i == 0 + then n + else fib' (i - 1) m (n + m); + + fib = n: fib' n 1 1; +in fib 10 diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-fix.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-fix.exp new file mode 100644 index 000000000000..c1581543511f --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-fix.exp @@ -0,0 +1 @@ +{ a = 1; b = 21; c = 42; } diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-fix.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-fix.nix new file mode 100644 index 000000000000..27d2fae1042e --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-fix.nix @@ -0,0 +1,7 @@ +let + fix = f: let x = f x; in x; +in fix(self: { + a = 1; + b = self.a + 20; + c = self.b * 2; +}) diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-float-repr.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-float-repr.exp new file mode 100644 index 000000000000..c55d2be717f1 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-float-repr.exp @@ -0,0 +1 @@ +1.23457 diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-float-repr.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-float-repr.nix new file mode 100644 index 000000000000..447bd5af7f5c --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-float-repr.nix @@ -0,0 +1,2 @@ +# Floats are displayed with a maximum of 5 digits +1.23456789 diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-floor.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-floor.exp new file mode 100644 index 000000000000..6f98a7f48f2f --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-floor.exp @@ -0,0 +1 @@ +[ 3 ] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-floor.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-floor.nix new file mode 100644 index 000000000000..c6b79c91a1b4 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-floor.nix @@ -0,0 +1 @@ +[ (builtins.floor 3.4) ] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-foldlStrict-lazy-elements.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-foldlStrict-lazy-elements.exp new file mode 100644 index 000000000000..d81cc0710eb6 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-foldlStrict-lazy-elements.exp @@ -0,0 +1 @@ +42 diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-foldlStrict-lazy-elements.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-foldlStrict-lazy-elements.nix new file mode 100644 index 000000000000..fc4129a2543a --- /dev/null +++ 
b/tvix/eval/src/tests/tvix_tests/eval-okay-foldlStrict-lazy-elements.nix @@ -0,0 +1,8 @@ +let + lst = builtins.foldl' + (acc: x: acc ++ [ x ]) + [ ] + [ 42 (throw "this shouldn't be evaluated") ]; +in + +builtins.head lst diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-foldlStrict-lazy-initial-accumulator.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-foldlStrict-lazy-initial-accumulator.exp new file mode 100644 index 000000000000..d81cc0710eb6 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-foldlStrict-lazy-initial-accumulator.exp @@ -0,0 +1 @@ +42 diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-foldlStrict-lazy-initial-accumulator.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-foldlStrict-lazy-initial-accumulator.nix new file mode 100644 index 000000000000..59fd29b55237 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-foldlStrict-lazy-initial-accumulator.nix @@ -0,0 +1,4 @@ +builtins.foldl' + (_: x: x) + (throw "This is never forced") + [ "but the results of applying op are" 42 ] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-foldlStrict.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-foldlStrict.exp new file mode 100644 index 000000000000..8d683a20fab7 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-foldlStrict.exp @@ -0,0 +1 @@ +[ 6 [ 0 1 2 3 ] 2 ] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-foldlStrict.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-foldlStrict.nix new file mode 100644 index 000000000000..44c0349387ff --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-foldlStrict.nix @@ -0,0 +1,5 @@ +[ + (builtins.foldl' builtins.add 0 [1 2 3]) + (builtins.foldl' (l1: l2: l1 ++ l2) [0] [[1] [2 3]]) + (builtins.foldl' (x: y: if x == 0 then y else x * y) 0 [1 2]) +] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-fromjson-escapes.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-fromjson-escapes.exp new file mode 100644 index 000000000000..add5505a8287 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-fromjson-escapes.exp @@ -0,0 +1 @@ +"quote \" reverse solidus \\ solidus / backspace formfeed newline \n carriage return \r horizontal tab \t 1 char unicode encoded backspace 1 char unicode encoded e with accent é 2 char unicode encoded s with caron š 3 char unicode encoded rightwards arrow →" diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-fromjson-escapes.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-fromjson-escapes.nix new file mode 100644 index 000000000000..f00713507732 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-fromjson-escapes.nix @@ -0,0 +1,3 @@ +# This string contains all supported escapes in a JSON string, per json.org +# \b and \f are not supported by Nix +builtins.fromJSON ''"quote \" reverse solidus \\ solidus \/ backspace \b formfeed \f newline \n carriage return \r horizontal tab \t 1 char unicode encoded backspace \u0008 1 char unicode encoded e with accent \u00e9 2 char unicode encoded s with caron \u0161 3 char unicode encoded rightwards arrow \u2192"'' diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-fromjson.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-fromjson.exp new file mode 100644 index 000000000000..24aa21d78ff7 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-fromjson.exp @@ -0,0 +1 @@ +[ { Image = { Animated = false; Height = 600; IDs = [ 116 943 234 38793 true false null -100 ]; Latitude = 37.7668; Longitude = -122.396; Thumbnail = { Height = 125; Url = "http://www.example.com/image/481989943"; Width = 100; }; Title = "View from 
15th Floor"; Width = 800; }; } { name = "a"; value = "b"; } [ 1 2 3 4 ] ] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-fromjson.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-fromjson.nix new file mode 100644 index 000000000000..e530789446ef --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-fromjson.nix @@ -0,0 +1,24 @@ +[ +# RFC 7159, section 13. + (builtins.fromJSON + '' + { + "Image": { + "Width": 800, + "Height": 600, + "Title": "View from 15th Floor", + "Thumbnail": { + "Url": "http://www.example.com/image/481989943", + "Height": 125, + "Width": 100 + }, + "Animated" : false, + "IDs": [116, 943, 234, 38793, true ,false,null, -100], + "Latitude": 37.7668, + "Longitude": -122.396 + } + } + '') + (builtins.fromJSON ''{"name": "a", "value": "b"}'') + (builtins.fromJSON "[ 1, 2, 3, 4 ]") +] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-functionargs.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-functionargs.exp new file mode 100644 index 000000000000..c1c9f8ffaf69 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-functionargs.exp @@ -0,0 +1 @@ +[ "stdenv" "fetchurl" "aterm-stdenv" "aterm-stdenv2" "libX11" "libXv" "mplayer-stdenv2.libXv-libX11" "mplayer-stdenv2.libXv-libX11_2" "nix-stdenv-aterm-stdenv" "nix-stdenv2-aterm2-stdenv2" ] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-functionargs.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-functionargs.nix new file mode 100644 index 000000000000..cd95b971f8a9 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-functionargs.nix @@ -0,0 +1,80 @@ +let + + stdenvFun = { }: { name = "stdenv"; }; + stdenv2Fun = { }: { name = "stdenv2"; }; + fetchurlFun = { stdenv }: assert stdenv.name == "stdenv"; { name = "fetchurl"; }; + atermFun = { stdenv, fetchurl }: { name = "aterm-${stdenv.name}"; }; + aterm2Fun = { stdenv, fetchurl }: { name = "aterm2-${stdenv.name}"; }; + nixFun = { stdenv, fetchurl, aterm }: { name = "nix-${stdenv.name}-${aterm.name}"; }; + + mplayerFun = + { stdenv, fetchurl, enableX11 ? false, xorg ? null, enableFoo ? true, foo ? null }: + assert stdenv.name == "stdenv2"; + assert enableX11 -> xorg.libXv.name == "libXv"; + assert enableFoo -> foo != null; + { name = "mplayer-${stdenv.name}.${xorg.libXv.name}-${xorg.libX11.name}"; }; + + makeOverridable = f: origArgs: f origArgs // + { override = newArgs: + makeOverridable f (origArgs // (if builtins.isFunction newArgs then newArgs origArgs else newArgs)); + }; + + callPackage_ = pkgs: f: args: + makeOverridable f ((builtins.intersectAttrs (builtins.functionArgs f) pkgs) // args); + + allPackages = + { overrides ? 
(pkgs: pkgsPrev: { }) }: + let + callPackage = callPackage_ pkgs; + pkgs = pkgsStd // (overrides pkgs pkgsStd); + pkgsStd = { + inherit pkgs; + stdenv = callPackage stdenvFun { }; + stdenv2 = callPackage stdenv2Fun { }; + fetchurl = callPackage fetchurlFun { }; + aterm = callPackage atermFun { }; + xorg = callPackage xorgFun { }; + mplayer = callPackage mplayerFun { stdenv = pkgs.stdenv2; enableFoo = false; }; + nix = callPackage nixFun { }; + }; + in pkgs; + + libX11Fun = { stdenv, fetchurl }: { name = "libX11"; }; + libX11_2Fun = { stdenv, fetchurl }: { name = "libX11_2"; }; + libXvFun = { stdenv, fetchurl, libX11 }: { name = "libXv"; }; + + xorgFun = + { pkgs }: + let callPackage = callPackage_ (pkgs // pkgs.xorg); in + { + libX11 = callPackage libX11Fun { }; + libXv = callPackage libXvFun { }; + }; + +in + +let + + pkgs = allPackages { }; + + pkgs2 = allPackages { + overrides = pkgs: pkgsPrev: { + stdenv = pkgs.stdenv2; + nix = pkgsPrev.nix.override { aterm = aterm2Fun { inherit (pkgs) stdenv fetchurl; }; }; + xorg = pkgsPrev.xorg // { libX11 = libX11_2Fun { inherit (pkgs) stdenv fetchurl; }; }; + }; + }; + +in + + [ pkgs.stdenv.name + pkgs.fetchurl.name + pkgs.aterm.name + pkgs2.aterm.name + pkgs.xorg.libX11.name + pkgs.xorg.libXv.name + pkgs.mplayer.name + pkgs2.mplayer.name + pkgs.nix.name + pkgs2.nix.name + ] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-functor-call.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-functor-call.exp new file mode 100644 index 000000000000..d81cc0710eb6 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-functor-call.exp @@ -0,0 +1 @@ +42 diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-functor-call.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-functor-call.nix new file mode 100644 index 000000000000..80ae345d836b --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-functor-call.nix @@ -0,0 +1 @@ +{ x = 21; __functor = self: y: self.x * y; } 2 diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-genlist.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-genlist.exp new file mode 100644 index 000000000000..cd4ca34f14df --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-genlist.exp @@ -0,0 +1 @@ +[ 0 1 4 9 16 ] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-genlist.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-genlist.nix new file mode 100644 index 000000000000..2c4dfba20324 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-genlist.nix @@ -0,0 +1 @@ +builtins.genList (x: x * x) 5 diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-identifier-formatting.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-identifier-formatting.exp new file mode 100644 index 000000000000..074f5f07f5f0 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-identifier-formatting.exp @@ -0,0 +1 @@ +{ "'quoted'" = false; "-20°" = false; "2normal" = false; "45 44 43-'3 2 1" = false; "9front" = false; Very2Normal = true; VeryNormal = true; _'12 = true; "_'12.5" = false; __internal = true; _internal = true; abort = true; assert = true; "attr.path" = false; false = true; foldl' = true; normal = true; normal2 = true; null = true; or = true; throw = true; true = true; x = true; x' = true; x'' = true; "😀" = false; } diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-identifier-formatting.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-identifier-formatting.nix new file mode 100644 index 000000000000..8f9aa2823801 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-identifier-formatting.nix @@ -0,0 +1,30 @@ +{ + 
__internal = true; + _internal = true; + normal = true; + VeryNormal = true; + normal2 = true; + Very2Normal = true; + _'12 = true; + foldl' = true; + x = true; + x' = true; + x'' = true; + + true = true; + false = true; + null = true; + or = true; + "assert" = true; # -ish + throw = true; + abort = true; + + "9front" = false; + "2normal" = false; + "-20°" = false; + "45 44 43-'3 2 1" = false; + "attr.path" = false; + "'quoted'" = false; + "_'12.5" = false; + "😀" = false; +} diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-import.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-import.exp new file mode 100644 index 000000000000..5ba7f64d78a7 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-import.exp @@ -0,0 +1 @@ +[ 42 42 ] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-import.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-import.nix new file mode 100644 index 000000000000..49cd244f06c1 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-import.nix @@ -0,0 +1,4 @@ +[ + (import ./directory) + (import ./directory/default.nix) +] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-inherit-string-ident.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-inherit-string-ident.exp new file mode 100644 index 000000000000..d81cc0710eb6 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-inherit-string-ident.exp @@ -0,0 +1 @@ +42 diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-inherit-string-ident.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-inherit-string-ident.nix new file mode 100644 index 000000000000..dde81e5a7c30 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-inherit-string-ident.nix @@ -0,0 +1,7 @@ +# identifiers in inherits can be string-like expressions + +let + set = { + inherit ({ value = 42; }) "value"; + }; +in set.value diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-internal-formals-deferred.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-internal-formals-deferred.exp new file mode 100644 index 000000000000..d81cc0710eb6 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-internal-formals-deferred.exp @@ -0,0 +1 @@ +42 diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-internal-formals-deferred.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-internal-formals-deferred.nix new file mode 100644 index 000000000000..5c6702120fc4 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-internal-formals-deferred.nix @@ -0,0 +1,3 @@ +# Tests formals which have internal default values that must be deferred. + +({ optional ? defaultValue, defaultValue }: optional) { defaultValue = 42; } diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-internal-formals.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-internal-formals.exp new file mode 100644 index 000000000000..d81cc0710eb6 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-internal-formals.exp @@ -0,0 +1 @@ +42 diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-internal-formals.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-internal-formals.nix new file mode 100644 index 000000000000..c6dd5e9d54cf --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-internal-formals.nix @@ -0,0 +1,3 @@ +# Tests formals which have internal default values. + +({ defaultValue, optional ? 
defaultValue }: optional) { defaultValue = 42; } diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-intersectattrs.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-intersectattrs.exp new file mode 100644 index 000000000000..25001b211fb8 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-intersectattrs.exp @@ -0,0 +1 @@ +{ a = 100; b = 200; } diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-intersectattrs.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-intersectattrs.nix new file mode 100644 index 000000000000..3534132ed48b --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-intersectattrs.nix @@ -0,0 +1,3 @@ +builtins.intersectAttrs + { a = 1; b = 2; c = 3; } + { a = 100; b = 200; d = 5; } diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-lambda-identity.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-lambda-identity.exp new file mode 100644 index 000000000000..d81cc0710eb6 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-lambda-identity.exp @@ -0,0 +1 @@ +42 diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-lambda-identity.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-lambda-identity.nix new file mode 100644 index 000000000000..f2ee49df8092 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-lambda-identity.nix @@ -0,0 +1,2 @@ +# Identity function is the simplest possible function. +(x: x) 42 diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-late-binding-closure.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-late-binding-closure.exp new file mode 100644 index 000000000000..d81cc0710eb6 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-late-binding-closure.exp @@ -0,0 +1 @@ +42 diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-late-binding-closure.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-late-binding-closure.nix new file mode 100644 index 000000000000..dae170b06bad --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-late-binding-closure.nix @@ -0,0 +1,4 @@ +let + f = n: n + a; + a = 2; +in f 40 diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-late-binding.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-late-binding.exp new file mode 100644 index 000000000000..d81cc0710eb6 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-late-binding.exp @@ -0,0 +1 @@ +42 diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-late-binding.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-late-binding.nix new file mode 100644 index 000000000000..41c4c53ea271 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-late-binding.nix @@ -0,0 +1,4 @@ +let + a = b; + b = 42; +in a diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-lazy-assert.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-lazy-assert.exp new file mode 100644 index 000000000000..48082f72f087 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-lazy-assert.exp @@ -0,0 +1 @@ +12 diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-lazy-assert.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-lazy-assert.nix new file mode 100644 index 000000000000..5a36964976ca --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-lazy-assert.nix @@ -0,0 +1,8 @@ +assert true; + +let + x = assert false; 13; + y = 12; +in + +{ inherit x y; }.y diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-lazy-equality.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-lazy-equality.exp new file mode 100644 index 000000000000..1c70d1bcf188 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-lazy-equality.exp @@ -0,0 +1 @@ +[ true true false true true ] 
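
Note on the fixture that follows: Nix's `==` is a deep, structural comparison, so comparing two lists or attribute sets forces the thunks inside them. That is why `{ x = 1 + 2; } == { x = 2 + 1; }` evaluates to `true` even though neither `x` was forced beforehand, and why the expected output above is mostly `true`. A minimal sketch of the semantics being pinned down here, assuming only a stock `nix-instantiate --eval` and a scratch file name of our choosing, neither of which is part of this change:

    # lazy-equality-sketch.nix (hypothetical scratch file, not part of the diff)
    let
      a = { x = 1 + 2; };          # x is an unevaluated thunk
      b = { x = 2 + 1; };          # a different thunk with the same value
    in
    [
      (a == b)                     # true: equality forces both thunks and compares 3 == 3
      ([ (2 + 2) ] == [ 4 ])       # true: element-wise comparison forces 2 + 2
      ([ (1 + 2) ] == [ (2 + 2) ]) # false: 3 != 4
    ]

Running `nix-instantiate --eval --strict lazy-equality-sketch.nix` should print `[ true true false ]`, the same behaviour the eval-okay-lazy-equality `.exp`/`.nix` pair below asserts.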
diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-lazy-equality.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-lazy-equality.nix new file mode 100644 index 000000000000..d19d1213d695 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-lazy-equality.nix @@ -0,0 +1,15 @@ +let + attrs1 = { x = 1 + 2; }; + attrs2 = { x = 2 + 1; }; + list1 = [ (1 + 2) ]; + list2 = [ (2 + 1) ]; + list3 = [ (2 + 2) ]; + list4 = [ (2 + 2) ]; + list5 = [ (2 + 2) ]; +in [ + (attrs1 == attrs2) + (list1 == list2) + (list3 == list2) + (list4 == [ 4 ]) + ([ 4 ] == list5) +] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-lazy-with-nested.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-lazy-with-nested.exp new file mode 100644 index 000000000000..d81cc0710eb6 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-lazy-with-nested.exp @@ -0,0 +1 @@ +42 diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-lazy-with-nested.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-lazy-with-nested.nix new file mode 100644 index 000000000000..22ac14b3f19e --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-lazy-with-nested.nix @@ -0,0 +1,5 @@ +# The 'namespace' of a with should only be evaluated if an identifier +# from it is actually accessed. + +with (abort "should not be evaluated"); +let a = dynamic; in 42 diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-lazy-with.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-lazy-with.exp new file mode 100644 index 000000000000..d81cc0710eb6 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-lazy-with.exp @@ -0,0 +1 @@ +42 diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-lazy-with.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-lazy-with.nix new file mode 100644 index 000000000000..8b1a0191dcc0 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-lazy-with.nix @@ -0,0 +1,6 @@ +# The 'namespace' of a with should only be evaluated if an identifier +# from it is actually accessed. 
+ +with (abort "should not be evaluated"); + +42 diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-legacy-let-fix.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-legacy-let-fix.exp new file mode 100644 index 000000000000..5d2955ffd5e1 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-legacy-let-fix.exp @@ -0,0 +1 @@ +{ one = 42; two = 42; } diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-legacy-let-fix.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-legacy-let-fix.nix new file mode 100644 index 000000000000..87fcffadee5f --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-legacy-let-fix.nix @@ -0,0 +1,9 @@ +let { + a = 21; + b = body.one; + + body = { + one = a * 2; + two = b; + }; +} diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-legacy-let-in-with.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-legacy-let-in-with.exp new file mode 100644 index 000000000000..d81cc0710eb6 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-legacy-let-in-with.exp @@ -0,0 +1 @@ +42 diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-legacy-let-in-with.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-legacy-let-in-with.nix new file mode 100644 index 000000000000..a5b05426689e --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-legacy-let-in-with.nix @@ -0,0 +1 @@ +with {}; let { body = 42; } diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-legacy-let.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-legacy-let.exp new file mode 100644 index 000000000000..d81cc0710eb6 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-legacy-let.exp @@ -0,0 +1 @@ +42 diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-legacy-let.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-legacy-let.nix new file mode 100644 index 000000000000..850e0252c2b6 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-legacy-let.nix @@ -0,0 +1,4 @@ +let { + a = 21; + body = a * 2; +} diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-let-identifiers.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-let-identifiers.exp new file mode 100644 index 000000000000..5776134d0e41 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-let-identifiers.exp @@ -0,0 +1 @@ +[ 1 2 3 ] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-let-identifiers.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-let-identifiers.nix new file mode 100644 index 000000000000..b6c37c34f949 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-let-identifiers.nix @@ -0,0 +1,5 @@ +let + a = 1; + "b" = 2; + ${"c"} = 3; +in [ a b c ] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-let-inherit-from-later-bound.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-let-inherit-from-later-bound.exp new file mode 100644 index 000000000000..409940768f2a --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-let-inherit-from-later-bound.exp @@ -0,0 +1 @@ +23 diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-let-inherit-from-later-bound.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-let-inherit-from-later-bound.nix new file mode 100644 index 000000000000..21196f48bcbe --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-let-inherit-from-later-bound.nix @@ -0,0 +1,13 @@ +let + inherit (c) d; + inherit (a) b c; + + a = { + b = 20; + c = { + d = 3; + }; + }; +in + +b + d diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-let-inherit.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-let-inherit.exp new file mode 100644 index 000000000000..0cfbf08886fc --- /dev/null +++ 
b/tvix/eval/src/tests/tvix_tests/eval-okay-let-inherit.exp @@ -0,0 +1 @@ +2 diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-let-inherit.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-let-inherit.nix new file mode 100644 index 000000000000..4ec270e3bf43 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-let-inherit.nix @@ -0,0 +1,12 @@ +let + set = { + a = 1; + }; +in + let + set2 = { + b = 1; + }; + inherit (set) a; + inherit (set2) b; + in a + b diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-let-sibling-access.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-let-sibling-access.exp new file mode 100644 index 000000000000..00750edc07d6 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-let-sibling-access.exp @@ -0,0 +1 @@ +3 diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-let-sibling-access.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-let-sibling-access.nix new file mode 100644 index 000000000000..7a65a5b1cc9f --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-let-sibling-access.nix @@ -0,0 +1,5 @@ +let + a = 1; + b = 2; + c = a + b; +in c diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-let-useful-plain-inherit-mixed.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-let-useful-plain-inherit-mixed.exp new file mode 100644 index 000000000000..3bed31f76e3f --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-let-useful-plain-inherit-mixed.exp @@ -0,0 +1 @@ +[ 1 2 3 4 ] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-let-useful-plain-inherit-mixed.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-let-useful-plain-inherit-mixed.nix new file mode 100644 index 000000000000..30981099cbde --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-let-useful-plain-inherit-mixed.nix @@ -0,0 +1,20 @@ +# This test mixes different ways of creating bindings in a let … in expression +# to make sure that the compiler initialises the locals in the same order as +# they are declared. 
+ +let + d = 4; +in + +# Trick to allow useless inherits in the following let +with { _unused = null; }; + +let + set = { b = 2; }; + a = 1; + inherit (set) b; + c = 3; + inherit d; +in + +[ a b c d ] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-let-useful-plain-inherit.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-let-useful-plain-inherit.exp new file mode 100644 index 000000000000..d00491fd7e5b --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-let-useful-plain-inherit.exp @@ -0,0 +1 @@ +1 diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-let-useful-plain-inherit.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-let-useful-plain-inherit.nix new file mode 100644 index 000000000000..3d1c46b10bb2 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-let-useful-plain-inherit.nix @@ -0,0 +1,9 @@ +with { a = 1; }; + +let + inherit a; +in + +with { a = 2; }; + +a diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-listtoattrs.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-listtoattrs.exp new file mode 100644 index 000000000000..74abef7bc6ed --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-listtoattrs.exp @@ -0,0 +1 @@ +"AAbar" diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-listtoattrs.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-listtoattrs.nix new file mode 100644 index 000000000000..89888fd56178 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-listtoattrs.nix @@ -0,0 +1,15 @@ +with builtins; +let + fold = op: nul: list: + if list == [] + then nul + else op (head list) (fold op nul (tail list)); + concat = + fold (x: y: x + y) ""; + asi = name: value : { inherit name value; }; + list = [ ( asi "a" "A" ) ( asi "b" "B" ) ]; + a = builtins.listToAttrs list; + b = builtins.listToAttrs ( list ++ list ); + r = builtins.listToAttrs [ (asi "result" [ a b ]) ( asi "throw" (throw "this should not be thrown")) ]; + x = builtins.listToAttrs [ (asi "foo" "bar") (asi "foo" "bla") ]; +in concat (map (x: x.a) r.result) + x.foo diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-manual-rec.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-manual-rec.exp new file mode 100644 index 000000000000..d81cc0710eb6 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-manual-rec.exp @@ -0,0 +1 @@ +42 diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-manual-rec.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-manual-rec.nix new file mode 100644 index 000000000000..8e1256d764e3 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-manual-rec.nix @@ -0,0 +1,9 @@ +# Manual desugaring of something similar to `rec`, to test lower level +# recursion primitives. 
+ +let + set = with set; { + a = 21; + b = a * 2; + }; +in set.b diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-merge-nested-attrs.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-merge-nested-attrs.exp new file mode 100644 index 000000000000..911ab51de5ca --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-merge-nested-attrs.exp @@ -0,0 +1 @@ +{ set = { a = 1; b = 2; }; } diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-merge-nested-attrs.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-merge-nested-attrs.nix new file mode 100644 index 000000000000..78b28909a29c --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-merge-nested-attrs.nix @@ -0,0 +1,9 @@ +{ + set = { + a = 1; + }; + + set = { + b = 2; + }; +} diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-merge-nested-rec-attrs.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-merge-nested-rec-attrs.exp new file mode 100644 index 000000000000..768eaae61cfc --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-merge-nested-rec-attrs.exp @@ -0,0 +1 @@ +{ set = { a = 21; b = 42; }; } diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-merge-nested-rec-attrs.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-merge-nested-rec-attrs.nix new file mode 100644 index 000000000000..cea4cb1b4f0d --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-merge-nested-rec-attrs.nix @@ -0,0 +1,12 @@ +{ + set = rec { + a = 21; + }; + + set = { + # Fun fact: This might be the only case in Nix where a lexical + # resolution of an identifier can only be resolved by looking at + # *siblings* in the AST. + b = 2 * a; + }; +} diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-multiline-string.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-multiline-string.exp new file mode 100644 index 000000000000..9839e480b781 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-multiline-string.exp @@ -0,0 +1 @@ +"hello\nworld" diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-multiline-string.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-multiline-string.nix new file mode 100644 index 000000000000..84beb22ed5c3 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-multiline-string.nix @@ -0,0 +1,2 @@ +''hello +world'' diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-multiple-nested-attrs.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-multiple-nested-attrs.exp new file mode 100644 index 000000000000..b5c707cf462e --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-multiple-nested-attrs.exp @@ -0,0 +1 @@ +{ a = { b = 15; }; b = { c = "test"; }; } diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-multiple-nested-attrs.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-multiple-nested-attrs.nix new file mode 100644 index 000000000000..5d611930ca7a --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-multiple-nested-attrs.nix @@ -0,0 +1 @@ +{ a.b = 15; b.c = "test"; } diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-mutually-recursive-let-binding.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-mutually-recursive-let-binding.exp new file mode 100644 index 000000000000..edca9baca9c0 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-mutually-recursive-let-binding.exp @@ -0,0 +1 @@ +{ a = 1; b = 2; c = 3; } diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-mutually-recursive-let-binding.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-mutually-recursive-let-binding.nix new file mode 100644 index 000000000000..1b3feda432ef --- /dev/null +++ 
b/tvix/eval/src/tests/tvix_tests/eval-okay-mutually-recursive-let-binding.nix @@ -0,0 +1,14 @@ +let + a = { + a = 3; + b = b.b; + }; + + b = { + a = a.a - 2; + b = 2; + c = a.c or 3; + }; +in + +a // b diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-ne-int.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-ne-int.exp new file mode 100644 index 000000000000..27ba77ddaf61 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-ne-int.exp @@ -0,0 +1 @@ +true diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-ne-int.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-ne-int.nix new file mode 100644 index 000000000000..e06b571a28bf --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-ne-int.nix @@ -0,0 +1 @@ +42 != 69 diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-ne-string.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-ne-string.exp new file mode 100644 index 000000000000..27ba77ddaf61 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-ne-string.exp @@ -0,0 +1 @@ +true diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-ne-string.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-ne-string.nix new file mode 100644 index 000000000000..a83471e500ce --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-ne-string.nix @@ -0,0 +1 @@ +"this" != "that" diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-nested-closure.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-nested-closure.exp new file mode 100644 index 000000000000..b6a7d89c68e0 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-nested-closure.exp @@ -0,0 +1 @@ +16 diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-nested-closure.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-nested-closure.nix new file mode 100644 index 000000000000..97bff7f07758 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-nested-closure.nix @@ -0,0 +1 @@ +(a: b: c: d: a + b + c + d) 1 3 5 7 diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-nested-deferred-upvalue.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-nested-deferred-upvalue.exp new file mode 100644 index 000000000000..209e3ef4b624 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-nested-deferred-upvalue.exp @@ -0,0 +1 @@ +20 diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-nested-deferred-upvalue.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-nested-deferred-upvalue.nix new file mode 100644 index 000000000000..358925e992c7 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-nested-deferred-upvalue.nix @@ -0,0 +1,6 @@ +let + doubler = n: outer n; + outer = let inner = n: a * n; + a = 2; + in inner; +in doubler 10 diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-nested-has-attrs.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-nested-has-attrs.exp new file mode 100644 index 000000000000..d2c1c04da3c4 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-nested-has-attrs.exp @@ -0,0 +1 @@ +[ true true true true true true true false false false false false ] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-nested-has-attrs.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-nested-has-attrs.nix new file mode 100644 index 000000000000..47dcec7a95f4 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-nested-has-attrs.nix @@ -0,0 +1,26 @@ +let + set = { + a.b.c = 123; + foo = { + bar = 23; + }; + baz = 1; + }; + + tes = "random value"; +in + +[ + (set ? a) + (set ? a.b) + (set ? a.b.c) + (set ? foo) + (set ? foo.bar) + (set.foo ? bar) + (set ? baz) + (set ? x) + (set ? x.y.z) + (tes ? bar) + (tes ? 
x.y.z) + (null ? null) +] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-nested-keys-let.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-nested-keys-let.exp new file mode 100644 index 000000000000..6db47b033efb --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-nested-keys-let.exp @@ -0,0 +1 @@ +{ a = { b = 42; }; } diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-nested-keys-let.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-nested-keys-let.nix new file mode 100644 index 000000000000..c75b7130e1ba --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-nested-keys-let.nix @@ -0,0 +1,4 @@ +let + inner = 21; + set.a.b = inner * 2; +in set diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-nested-keys-rec.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-nested-keys-rec.exp new file mode 100644 index 000000000000..77eb325dde19 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-nested-keys-rec.exp @@ -0,0 +1 @@ +{ a = { b = { c = 42; }; }; outer = 21; } diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-nested-keys-rec.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-nested-keys-rec.nix new file mode 100644 index 000000000000..797d11108ff4 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-nested-keys-rec.nix @@ -0,0 +1,4 @@ +rec { + outer = 21; + a.b.c = outer * 2; +} diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-nested-let-slots.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-nested-let-slots.exp new file mode 100644 index 000000000000..e45ef1da2f54 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-nested-let-slots.exp @@ -0,0 +1 @@ +[ 1 2 3 4 5 6 7 ] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-nested-let-slots.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-nested-let-slots.nix new file mode 100644 index 000000000000..811bb4361bf7 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-nested-let-slots.nix @@ -0,0 +1,17 @@ +# This test deals with a tricky edge-case around scopes, where the +# stack slot accounting must correctly account for the position at +# which the body of a let expression is being initialised when +# resolving upvalues. 
+ +let + a = 1; + b = 2; + outer = let + c = 3; + d = 4; + inner = let + e = 5; + f = 6; + in g: [ a b c d e f g ]; + in inner; +in outer 7 diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-nested-let.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-nested-let.exp new file mode 100644 index 000000000000..7f8f011eb73d --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-nested-let.exp @@ -0,0 +1 @@ +7 diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-nested-let.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-nested-let.nix new file mode 100644 index 000000000000..caaa20211928 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-nested-let.nix @@ -0,0 +1,7 @@ +let + a = let + b = 1; + c = 2; + in b + c; + b = 4; +in a + b diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-nested-poisoning.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-nested-poisoning.exp new file mode 100644 index 000000000000..d81cc0710eb6 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-nested-poisoning.exp @@ -0,0 +1 @@ +42 diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-nested-poisoning.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-nested-poisoning.nix new file mode 100644 index 000000000000..8d0280bb8973 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-nested-poisoning.nix @@ -0,0 +1,4 @@ +let + null = 1; + f = n: n + null; +in f 41 diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-nested-set-thunks.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-nested-set-thunks.exp new file mode 100644 index 000000000000..d81cc0710eb6 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-nested-set-thunks.exp @@ -0,0 +1 @@ +42 diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-nested-set-thunks.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-nested-set-thunks.nix new file mode 100644 index 000000000000..f3ad8293540f --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-nested-set-thunks.nix @@ -0,0 +1,5 @@ +({ + x = { + y = 42; + }; +}).x.y diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-nested-siblings.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-nested-siblings.exp new file mode 100644 index 000000000000..d757cae1f588 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-nested-siblings.exp @@ -0,0 +1 @@ +{ outer = 42; sibling = 42; } diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-nested-siblings.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-nested-siblings.nix new file mode 100644 index 000000000000..31111d80810c --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-nested-siblings.nix @@ -0,0 +1,7 @@ +rec { + outer = + let inner = sibling; + in inner; + + sibling = 42; +} diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-nested-thunks.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-nested-thunks.exp new file mode 100644 index 000000000000..d81cc0710eb6 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-nested-thunks.exp @@ -0,0 +1 @@ +42 diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-nested-thunks.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-nested-thunks.nix new file mode 100644 index 000000000000..133929dd1961 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-nested-thunks.nix @@ -0,0 +1,7 @@ +# If a thunk yields another thunk, OpForce should keep forcing until +# there is a value. 
+let + a = b; + b = c; + c = 42; +in a diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-nested-with.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-nested-with.exp new file mode 100644 index 000000000000..0cfbf08886fc --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-nested-with.exp @@ -0,0 +1 @@ +2 diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-nested-with.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-nested-with.nix new file mode 100644 index 000000000000..ea5ef568edab --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-nested-with.nix @@ -0,0 +1,4 @@ +let + set1 = { a = 1; }; + set2 = { a = 2; }; +in with set1; with set2; a diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-nix-version-cmp.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-nix-version-cmp.exp new file mode 100644 index 000000000000..3a2e3f4984a0 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-nix-version-cmp.exp @@ -0,0 +1 @@ +-1 diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-nix-version-cmp.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-nix-version-cmp.nix new file mode 100644 index 000000000000..6f353056127c --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-nix-version-cmp.nix @@ -0,0 +1,5 @@ +# nixpkgs checks against the `builtins.nixVersion` and fails if it +# doesn't like what it sees. To work around this we have a "user-agent +# style" version (see cl/6858) that ensures compatibility. + +builtins.compareVersions "2.3" builtins.nixVersion diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-observe-infinite-attrs.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-observe-infinite-attrs.exp new file mode 100644 index 000000000000..bbb332a5eea0 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-observe-infinite-attrs.exp @@ -0,0 +1 @@ +[ "x" "y" ] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-observe-infinite-attrs.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-observe-infinite-attrs.nix new file mode 100644 index 000000000000..684c88f80067 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-observe-infinite-attrs.nix @@ -0,0 +1,4 @@ +# The below attribute set is infinitely large, but we should be able +# to observe it as long as we don't access its entire value. 
+ +let as = { x = 123; y = as; }; in builtins.attrNames as.y.y diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-optimised-bools.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-optimised-bools.exp new file mode 100644 index 000000000000..9d9185fcd155 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-optimised-bools.exp @@ -0,0 +1 @@ +[ true true false false true true false false ] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-optimised-bools.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-optimised-bools.nix new file mode 100644 index 000000000000..650d7f028df2 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-optimised-bools.nix @@ -0,0 +1,21 @@ +let + makeTrue = _: true; + makeFalse = _: false; +in +[ + # useless `false` + (false || makeTrue null) # true + (makeTrue null || false) # true + + # useless `true` + (true && makeFalse null) # false + (makeFalse null && true) # false + + # useless `||` + (true || makeFalse null) # true + (makeFalse null || true) # true + + # useless `&&` + (false && makeTrue null) # false + (makeTrue null && false) # false +] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-or-operator-default.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-or-operator-default.exp new file mode 100644 index 000000000000..d00491fd7e5b --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-or-operator-default.exp @@ -0,0 +1 @@ +1 diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-or-operator-default.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-or-operator-default.nix new file mode 100644 index 000000000000..444f270af637 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-or-operator-default.nix @@ -0,0 +1 @@ +{ b = 1; }.b or 2 diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-or-operator-nested-default.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-or-operator-nested-default.exp new file mode 100644 index 000000000000..0cfbf08886fc --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-or-operator-nested-default.exp @@ -0,0 +1 @@ +2 diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-or-operator-nested-default.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-or-operator-nested-default.nix new file mode 100644 index 000000000000..ceffd0697b28 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-or-operator-nested-default.nix @@ -0,0 +1 @@ +{ a.b = 1; }.a.c or 2 diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-or-operator-nested.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-or-operator-nested.exp new file mode 100644 index 000000000000..d00491fd7e5b --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-or-operator-nested.exp @@ -0,0 +1 @@ +1 diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-or-operator-nested.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-or-operator-nested.nix new file mode 100644 index 000000000000..1a76594546b3 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-or-operator-nested.nix @@ -0,0 +1 @@ +{ a.b = 1; }.a.b or 2 diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-or-operator-non-set.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-or-operator-non-set.exp new file mode 100644 index 000000000000..a833e32892e4 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-or-operator-non-set.exp @@ -0,0 +1 @@ +"works fine" diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-or-operator-non-set.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-or-operator-non-set.nix new file mode 100644 index 000000000000..fd09bfee64c2 --- /dev/null +++ 
b/tvix/eval/src/tests/tvix_tests/eval-okay-or-operator-non-set.nix @@ -0,0 +1,2 @@ +# `or` operator should keep working if it encounters a non-set type. +{ a.b = 42; }.a.b.c or "works fine" diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-or-operator.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-or-operator.exp new file mode 100644 index 000000000000..d00491fd7e5b --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-or-operator.exp @@ -0,0 +1 @@ +1 diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-or-operator.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-or-operator.nix new file mode 100644 index 000000000000..ce1e6e67c289 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-or-operator.nix @@ -0,0 +1 @@ +{ a = 1; }.a or 2 diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-overlapping-nested-attrs.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-overlapping-nested-attrs.exp new file mode 100644 index 000000000000..2483a2718301 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-overlapping-nested-attrs.exp @@ -0,0 +1 @@ +{ a = { b = 15; c = "test"; }; } diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-overlapping-nested-attrs.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-overlapping-nested-attrs.nix new file mode 100644 index 000000000000..4154ff9da29f --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-overlapping-nested-attrs.nix @@ -0,0 +1,4 @@ +{ + a.b = 15; + a.c = "test"; +} diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-parsedrvname.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-parsedrvname.exp new file mode 100644 index 000000000000..27ba77ddaf61 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-parsedrvname.exp @@ -0,0 +1 @@ +true diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-parsedrvname.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-parsedrvname.nix new file mode 100644 index 000000000000..fea6e234dcfd --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-parsedrvname.nix @@ -0,0 +1,12 @@ + +# the first dash followed by a non-alphabetic character separates +# the "name" from the "version" + +assert builtins.parseDrvName "ripgrep-1.2" == { name = "ripgrep"; version = "1.2"; }; +assert builtins.parseDrvName "rip-grep-1.2" == { name = "rip-grep"; version = "1.2"; }; +assert builtins.parseDrvName "7zip_archiver-0.2" == { name = "7zip_archiver"; version = "0.2"; }; +assert builtins.parseDrvName "gcc-1-2" == { name = "gcc"; version = "1-2"; }; +assert builtins.parseDrvName "bash--1-2" == { name = "bash"; version = "-1-2"; }; +assert builtins.parseDrvName "xvidtune-?1-2" == { name = "xvidtune"; version = "?1-2"; }; + +true diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-pathexists.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-pathexists.exp new file mode 100644 index 000000000000..27ba77ddaf61 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-pathexists.exp @@ -0,0 +1 @@ +true diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-pathexists.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-pathexists.nix new file mode 100644 index 000000000000..ab3d0369401c --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-pathexists.nix @@ -0,0 +1,2 @@ +builtins.pathExists ./lib.nix +&& !builtins.pathExists ./bla.nix diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-poisoned-scopes.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-poisoned-scopes.exp new file mode 100644 index 000000000000..5776134d0e41 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-poisoned-scopes.exp @@ -0,0 
+1 @@ +[ 1 2 3 ] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-poisoned-scopes.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-poisoned-scopes.nix new file mode 100644 index 000000000000..81f03d9e2b09 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-poisoned-scopes.nix @@ -0,0 +1,6 @@ +let + true = 1; + false = 2; + null = 3; +in +[ true false null ] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-readDir.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-readDir.exp new file mode 100644 index 000000000000..bf8d2c14ea4f --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-readDir.exp @@ -0,0 +1 @@ +{ bar = "regular"; foo = "directory"; } diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-readDir.nix.disabled b/tvix/eval/src/tests/tvix_tests/eval-okay-readDir.nix.disabled new file mode 100644 index 000000000000..a7ec9292aae2 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-readDir.nix.disabled @@ -0,0 +1 @@ +builtins.readDir ./readDir diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-readfile.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-readfile.exp new file mode 100644 index 000000000000..a2c87d0c439f --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-readfile.exp @@ -0,0 +1 @@ +"builtins.readFile ./eval-okay-readfile.nix\n" diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-readfile.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-readfile.nix new file mode 100644 index 000000000000..82f7cb17435a --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-readfile.nix @@ -0,0 +1 @@ +builtins.readFile ./eval-okay-readfile.nix diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-rec-dynamic-keys.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-rec-dynamic-keys.exp new file mode 100644 index 000000000000..ac8d062a6911 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-rec-dynamic-keys.exp @@ -0,0 +1 @@ +{ barbaz = 42; foobar = 42; val = 21; } diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-rec-dynamic-keys.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-rec-dynamic-keys.nix new file mode 100644 index 000000000000..8d7a8cef8e10 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-rec-dynamic-keys.nix @@ -0,0 +1,5 @@ +rec { + val = 21; + ${"foo" + "bar"} = 42; + ${"bar" + "baz"} = val * 2; +} diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-rec-nested-access.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-rec-nested-access.exp new file mode 100644 index 000000000000..a1dca9bb6860 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-rec-nested-access.exp @@ -0,0 +1 @@ +{ a = { b = 1; c = 2; }; } diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-rec-nested-access.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-rec-nested-access.nix new file mode 100644 index 000000000000..7d037c6b37bc --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-rec-nested-access.nix @@ -0,0 +1,4 @@ +rec { + a.b = 1; + a.c = a.b * 2; +} diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-recursive-attrs-all-features.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-recursive-attrs-all-features.exp new file mode 100644 index 000000000000..d81cc0710eb6 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-recursive-attrs-all-features.exp @@ -0,0 +1 @@ +42 diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-recursive-attrs-all-features.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-recursive-attrs-all-features.nix new file mode 100644 index 000000000000..a234705b5e53 --- /dev/null +++ 
b/tvix/eval/src/tests/tvix_tests/eval-okay-recursive-attrs-all-features.nix @@ -0,0 +1,13 @@ +let a = 1; +in +(rec { + inherit a; + + b = { + c = a + 20; + }; + + inherit (b) c; + + d = c * 2; +}).d diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-regex-match.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-regex-match.exp new file mode 100644 index 000000000000..950103539123 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-regex-match.exp @@ -0,0 +1 @@ +[ true true false true true true true false false true false [ "foobar" ] [ "FOO" ] [ "/path/to/" "/path/to" "foobar" "nix" ] [ null null "foobar" "cc" ] ] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-regex-match.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-regex-match.nix new file mode 100644 index 000000000000..f774e00a215a --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-regex-match.nix @@ -0,0 +1,29 @@ +with builtins; + +let + + matches = pat: s: match pat s != null; + + splitFN = match "((.*)/)?([^/]*)\\.(nix|cc)"; + +in + +[ + (matches "foobar" "foobar") + (matches "fo*" "f") + (matches "fo+" "f") + (matches "fo*" "fo") + (matches "fo*" "foo") + (matches "fo+" "foo") + (matches "fo{1,2}" "foo") + (matches "fo{1,2}" "fooo") + (matches "fo*" "foobar") + (matches "[[:space:]]+([^[:space:]]+)[[:space:]]+" " foo ") + (matches "[[:space:]]+([[:upper:]]+)[[:space:]]+" " foo ") + + (match "(.*)\\.nix" "foobar.nix") + (match "[[:space:]]+([[:upper:]]+)[[:space:]]+" " FOO ") + + (splitFN "/path/to/foobar.nix") + (splitFN "foobar.cc") +] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-remove.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-remove.exp new file mode 100644 index 000000000000..8d38505c1686 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-remove.exp @@ -0,0 +1 @@ +456 diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-remove.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-remove.nix new file mode 100644 index 000000000000..4ad5ba897fa7 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-remove.nix @@ -0,0 +1,5 @@ +let { + attrs = {x = 123; y = 456;}; + + body = (removeAttrs attrs ["x"]).y; +} \ No newline at end of file diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-repeated-list-to-attrs.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-repeated-list-to-attrs.exp new file mode 100644 index 000000000000..b4a1e66d6b8a --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-repeated-list-to-attrs.exp @@ -0,0 +1 @@ +[ 1 2 ] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-repeated-list-to-attrs.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-repeated-list-to-attrs.nix new file mode 100644 index 000000000000..2f719dcef5be --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-repeated-list-to-attrs.nix @@ -0,0 +1,13 @@ +# Ensure that builtins.listToAttrs returns the first instance of a key. + +let + inherit (builtins) foldl' listToAttrs; + + input = [ { name = "result"; value = 1; } { name = "result"; value = 2; } ]; + + # foldl-based version of listToAttrs with the _opposite_ behaviour. 
+ listToAttrs' = list: foldl' ( acc: elem: acc // { ${elem.name} = elem.value; }) {} list; +in [ + (listToAttrs input).result + (listToAttrs' input).result +] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-seq.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-seq.exp new file mode 100644 index 000000000000..d81cc0710eb6 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-seq.exp @@ -0,0 +1 @@ +42 diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-seq.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-seq.nix new file mode 100644 index 000000000000..fd0806c199a5 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-seq.nix @@ -0,0 +1 @@ +(builtins.seq 1 2) + (builtins.seq [ (throw "list") ] 20) + (builtins.seq { boing = throw "set"; } 20) diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-simple-closure.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-simple-closure.exp new file mode 100644 index 000000000000..7f8f011eb73d --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-simple-closure.exp @@ -0,0 +1 @@ +7 diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-simple-closure.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-simple-closure.nix new file mode 100644 index 000000000000..56445454fec7 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-simple-closure.nix @@ -0,0 +1 @@ +(a: b: a + b) 2 5 diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-simple-interpol.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-simple-interpol.exp new file mode 100644 index 000000000000..cd4bc1ab64cc --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-simple-interpol.exp @@ -0,0 +1 @@ +"hello world" diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-simple-interpol.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-simple-interpol.nix new file mode 100644 index 000000000000..125b0859ac42 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-simple-interpol.nix @@ -0,0 +1 @@ +"hello ${"world"}" diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-simple-let.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-simple-let.exp new file mode 100644 index 000000000000..00750edc07d6 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-simple-let.exp @@ -0,0 +1 @@ +3 diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-simple-let.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-simple-let.nix new file mode 100644 index 000000000000..b440a220ff6d --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-simple-let.nix @@ -0,0 +1,4 @@ +let + a = 1; + b = 2; +in a + b diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-simple-nested-attrs.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-simple-nested-attrs.exp new file mode 100644 index 000000000000..6db47b033efb --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-simple-nested-attrs.exp @@ -0,0 +1 @@ +{ a = { b = 42; }; } diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-simple-nested-attrs.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-simple-nested-attrs.nix new file mode 100644 index 000000000000..a97394d16523 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-simple-nested-attrs.nix @@ -0,0 +1 @@ +{ a.b = 42; } diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-simple-recursive-attrs.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-simple-recursive-attrs.exp new file mode 100644 index 000000000000..d81cc0710eb6 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-simple-recursive-attrs.exp @@ -0,0 +1 @@ +42 diff --git 
a/tvix/eval/src/tests/tvix_tests/eval-okay-simple-recursive-attrs.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-simple-recursive-attrs.nix new file mode 100644 index 000000000000..c86ff80383c0 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-simple-recursive-attrs.nix @@ -0,0 +1,4 @@ +(rec { + a = 21; + b = a * 2; +}).b diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-simple-with.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-simple-with.exp new file mode 100644 index 000000000000..d00491fd7e5b --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-simple-with.exp @@ -0,0 +1 @@ +1 diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-simple-with.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-simple-with.nix new file mode 100644 index 000000000000..64962b50ff63 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-simple-with.nix @@ -0,0 +1,5 @@ +let + set = { + a = 1; + }; +in with set; a diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-stable-sort.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-stable-sort.exp new file mode 100644 index 000000000000..9d783762143e --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-stable-sort.exp @@ -0,0 +1 @@ +[ { index = 7; key = 0; } { index = 0; key = 1; } { index = 13; key = 1; } { index = 1; key = 2; } { index = 3; key = 2; } { index = 4; key = 2; } { index = 5; key = 2; } { index = 12; key = 2; } { index = 14; key = 2; } { index = 2; key = 3; } { index = 11; key = 3; } { index = 15; key = 3; } { index = 10; key = 4; } { index = 6; key = 5; } { index = 8; key = 5; } { index = 9; key = 5; } { index = 16; key = 22; } ] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-stable-sort.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-stable-sort.nix new file mode 100644 index 000000000000..9969e0a29433 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-stable-sort.nix @@ -0,0 +1,7 @@ +let + keys = [ 1 2 3 2 2 2 5 0 5 5 4 3 2 1 2 3 22 ]; +in + +builtins.sort + (a: b: a.key < b.key) + (builtins.genList (index: { inherit index; key = builtins.elemAt keys index; }) (builtins.length keys)) diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-thunked-functor.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-thunked-functor.exp new file mode 100644 index 000000000000..d81cc0710eb6 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-thunked-functor.exp @@ -0,0 +1 @@ +42 diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-thunked-functor.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-thunked-functor.nix new file mode 100644 index 000000000000..f8eba2ac2fc0 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-thunked-functor.nix @@ -0,0 +1,7 @@ +let + __functor = f; + f = self: x: self.out * x; +in { + inherit __functor; + out = 21; +} 2 diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-thunked-if.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-thunked-if.exp new file mode 100644 index 000000000000..ffcd4415b08f --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-thunked-if.exp @@ -0,0 +1 @@ +{ } diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-thunked-if.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-thunked-if.nix new file mode 100644 index 000000000000..65e9e66d74dc --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-thunked-if.nix @@ -0,0 +1,6 @@ +let + a = {}; +in let + c = if builtins.isFunction a then a b else a; + b = {}; +in c diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-thunked-string-interpolation.exp 
b/tvix/eval/src/tests/tvix_tests/eval-okay-thunked-string-interpolation.exp new file mode 100644 index 000000000000..fc2f21e9305c --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-thunked-string-interpolation.exp @@ -0,0 +1 @@ +"strict literal" diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-thunked-string-interpolation.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-thunked-string-interpolation.nix new file mode 100644 index 000000000000..bd3555bb2412 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-thunked-string-interpolation.nix @@ -0,0 +1,7 @@ +let + final = { text = "strict literal"; inherit x y; }; + x = "lazy ${throw "interpolation"}"; + y = "${throw "also lazy!"}"; +in + +final.text diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-thunked-with.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-thunked-with.exp new file mode 100644 index 000000000000..d81cc0710eb6 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-thunked-with.exp @@ -0,0 +1 @@ +42 diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-thunked-with.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-thunked-with.nix new file mode 100644 index 000000000000..6f32660c4c33 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-thunked-with.nix @@ -0,0 +1,7 @@ +# Creates a `with` across multiple thunk boundaries. + +let + set = { + a = with { b = 42; }; b; + }; +in set.a diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-toplevel-finaliser.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-toplevel-finaliser.exp new file mode 100644 index 000000000000..edca9baca9c0 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-toplevel-finaliser.exp @@ -0,0 +1 @@ +{ a = 1; b = 2; c = 3; } diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-toplevel-finaliser.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-toplevel-finaliser.nix new file mode 100644 index 000000000000..4c6884bec3df --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-toplevel-finaliser.nix @@ -0,0 +1,10 @@ +# A simple expression with upvalue resolution beyond the target stack +# index of the root expression. 
+ +let + a = 1; + b = 2; + c = 3; +in { + inherit a b c; +} diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-tryeval.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-tryeval.exp new file mode 100644 index 000000000000..8b6ed7dbac6b --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-tryeval.exp @@ -0,0 +1 @@ +{ v = false; w = { success = false; value = false; }; x = { success = true; value = "x"; }; y = { success = false; value = false; }; z = { success = false; value = false; }; } diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-tryeval.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-tryeval.nix new file mode 100644 index 000000000000..e2357c798753 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-tryeval.nix @@ -0,0 +1,7 @@ +{ + v = (builtins.tryEval (toString <oink>)).value; + w = builtins.tryEval <nope>; + x = builtins.tryEval "x"; + y = builtins.tryEval (assert false; "y"); + z = builtins.tryEval (throw "bla"); +} diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-unpoison-scope.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-unpoison-scope.exp new file mode 100644 index 000000000000..5462431496bc --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-unpoison-scope.exp @@ -0,0 +1 @@ +[ true false null 1 2 3 ] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-unpoison-scope.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-unpoison-scope.nix new file mode 100644 index 000000000000..30e9667da821 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-unpoison-scope.nix @@ -0,0 +1,7 @@ +let + poisoned = let + true = 1; + false = 2; + null = 3; + in [ true false null ]; +in [ true false null ] ++ poisoned diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-useless-inherit-with.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-useless-inherit-with.exp new file mode 100644 index 000000000000..d00491fd7e5b --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-useless-inherit-with.exp @@ -0,0 +1 @@ +1 diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-useless-inherit-with.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-useless-inherit-with.nix new file mode 100644 index 000000000000..d335e5363000 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-useless-inherit-with.nix @@ -0,0 +1,15 @@ +# Normally using an `inherit` without a source attribute set within a +# `let` is a no-op, *unless* there is a with in-scope that might +# provide the value. + +# Provide a dynamic `x` identifier in the scope. 
+with ({ x = 1;}); + +# inherit this `x` as a static identifier +let inherit x; + +# Provide another dynamic `x` identifier +in with ({ x = 3; }); + +# Inherited static identifier should have precedence +x diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-value-pointer-compare.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-value-pointer-compare.exp new file mode 100644 index 000000000000..27ba77ddaf61 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-value-pointer-compare.exp @@ -0,0 +1 @@ +true diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-value-pointer-compare.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-value-pointer-compare.nix new file mode 100644 index 000000000000..f60d27d38157 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-value-pointer-compare.nix @@ -0,0 +1,6 @@ +# For an explanation of this behavior see //tvix/docs/value-pointer-equality.md +let + f = owo: "thia"; +in + +[ f 42 ] > [ f 21 ] \ No newline at end of file diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-value-pointer-equality.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-value-pointer-equality.exp new file mode 100644 index 000000000000..aec30f297af9 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-value-pointer-equality.exp @@ -0,0 +1 @@ +[ true true true true false false false true true true true true true true true true false false ] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-value-pointer-equality.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-value-pointer-equality.nix new file mode 100644 index 000000000000..206881d7d9c2 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-value-pointer-equality.nix @@ -0,0 +1,46 @@ +# For an explanation of this behavior see //tvix/docs/value-pointer-equality.md +let + # Some incomparable values + f = MC: "Boing"; + t = [ (throw "is a little blue man") ]; + a = { "with" = abort "headphones and a big smile."; }; + + # Aliases + f' = f; + t' = t; + a' = a; + + peq1 = a: b: [ a ] == [ b ]; + peq2 = a: b: { x = a; } == { x = b; }; +in + +[ + # pointer equality of functions + (peq1 f f) + (peq2 f f) + (peq1 f f') + (peq2 f f') + + # encapsulation is necessary for pointer equality + (f == f) + (f == f') + # works with != + ([ f ] != [ f' ]) + + # thunks that fail to evaluated wrapped in sets/lists + (peq1 t t) + (peq2 t t) + (peq1 a a) + (peq2 a a) + (peq1 t t') + (peq2 t t') + (peq1 a' a) + (peq2 a' a) + + # function equality with builtins.elem + (builtins.elem f [ 21 f 42 ]) + + # pointer inequality + (peq1 f (x: x)) + (peq2 (x: x) f) +] \ No newline at end of file diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-with-closure.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-with-closure.exp new file mode 100644 index 000000000000..fa8f08cb6ff8 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-with-closure.exp @@ -0,0 +1 @@ +150 diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-with-closure.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-with-closure.nix new file mode 100644 index 000000000000..7e2f7c073bfc --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-with-closure.nix @@ -0,0 +1,5 @@ +# Upvalues from `with` require special runtime handling. Do they work? 
+let + f = with { a = 15; }; n: n * a; +in +f 10 diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-with-in-dynamic-key.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-with-in-dynamic-key.exp new file mode 100644 index 000000000000..d81cc0710eb6 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-with-in-dynamic-key.exp @@ -0,0 +1 @@ +42 diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-with-in-dynamic-key.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-with-in-dynamic-key.nix new file mode 100644 index 000000000000..c44455a5bf83 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-with-in-dynamic-key.nix @@ -0,0 +1,12 @@ +# Tests correct tracking of stack indices within construction of an +# attribute set. Dynamic keys can be any expression, so something that +# is extremely sensitive to stack offsets (like `with`) can be tricky. + +let + set1 = { key = "b"; }; + set2 = { + a = 20; + ${with set1; key} = 20; + ${with { key = "c"; }; key} = 2; + }; +in set2.a + set2.b + set2.c diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-with-in-list.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-with-in-list.exp new file mode 100644 index 000000000000..5776134d0e41 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-with-in-list.exp @@ -0,0 +1 @@ +[ 1 2 3 ] diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-with-in-list.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-with-in-list.nix new file mode 100644 index 000000000000..bb62fdf31cd7 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/eval-okay-with-in-list.nix @@ -0,0 +1,13 @@ +# This code causes a situation where a list element causes an +# additional phantom value to temporarily be placed on the locals +# stack, which must be correctly accounted for by the compiler. + +let + set = { + value = 2; + }; +in [ + 1 + (with set; value) + 3 +] diff --git a/tvix/eval/src/tests/tvix_tests/identity-bool-false.nix b/tvix/eval/src/tests/tvix_tests/identity-bool-false.nix new file mode 100644 index 000000000000..c508d5366f70 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/identity-bool-false.nix @@ -0,0 +1 @@ +false diff --git a/tvix/eval/src/tests/tvix_tests/identity-bool-true.nix b/tvix/eval/src/tests/tvix_tests/identity-bool-true.nix new file mode 100644 index 000000000000..27ba77ddaf61 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/identity-bool-true.nix @@ -0,0 +1 @@ +true diff --git a/tvix/eval/src/tests/tvix_tests/identity-dollar-escape.nix b/tvix/eval/src/tests/tvix_tests/identity-dollar-escape.nix new file mode 100644 index 000000000000..08951d7637a6 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/identity-dollar-escape.nix @@ -0,0 +1 @@ +"\${foobar}" diff --git a/tvix/eval/src/tests/tvix_tests/identity-empty-attrs.nix b/tvix/eval/src/tests/tvix_tests/identity-empty-attrs.nix new file mode 100644 index 000000000000..ffcd4415b08f --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/identity-empty-attrs.nix @@ -0,0 +1 @@ +{ } diff --git a/tvix/eval/src/tests/tvix_tests/identity-empty-list.nix b/tvix/eval/src/tests/tvix_tests/identity-empty-list.nix new file mode 100644 index 000000000000..1e3ec7217afb --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/identity-empty-list.nix @@ -0,0 +1 @@ +[ ] diff --git a/tvix/eval/src/tests/tvix_tests/identity-flat-attrs.nix b/tvix/eval/src/tests/tvix_tests/identity-flat-attrs.nix new file mode 100644 index 000000000000..e7c2ae18a661 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/identity-flat-attrs.nix @@ -0,0 +1 @@ +{ a = 15; b = "string"; c = null; } diff --git 
a/tvix/eval/src/tests/tvix_tests/identity-float.nix b/tvix/eval/src/tests/tvix_tests/identity-float.nix new file mode 100644 index 000000000000..bf77d549685a --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/identity-float.nix @@ -0,0 +1 @@ +4.2 diff --git a/tvix/eval/src/tests/tvix_tests/identity-heterogeneous-list.nix b/tvix/eval/src/tests/tvix_tests/identity-heterogeneous-list.nix new file mode 100644 index 000000000000..87f7fb0d0668 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/identity-heterogeneous-list.nix @@ -0,0 +1 @@ +[ 1 2.1 "three" null ] diff --git a/tvix/eval/src/tests/tvix_tests/identity-homogeneous-float-list.nix b/tvix/eval/src/tests/tvix_tests/identity-homogeneous-float-list.nix new file mode 100644 index 000000000000..48e6655fb169 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/identity-homogeneous-float-list.nix @@ -0,0 +1 @@ +[ 4.2 6.9 13.37 ] diff --git a/tvix/eval/src/tests/tvix_tests/identity-homogeneous-int-list.nix b/tvix/eval/src/tests/tvix_tests/identity-homogeneous-int-list.nix new file mode 100644 index 000000000000..d23a5c38147b --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/identity-homogeneous-int-list.nix @@ -0,0 +1 @@ +[ 0 1 2 3 4 5 7 8 9 ] diff --git a/tvix/eval/src/tests/tvix_tests/identity-homogeneous-string-list.nix b/tvix/eval/src/tests/tvix_tests/identity-homogeneous-string-list.nix new file mode 100644 index 000000000000..d78a54e5b002 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/identity-homogeneous-string-list.nix @@ -0,0 +1 @@ +[ "string" "list" ] diff --git a/tvix/eval/src/tests/tvix_tests/identity-int.nix b/tvix/eval/src/tests/tvix_tests/identity-int.nix new file mode 100644 index 000000000000..d81cc0710eb6 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/identity-int.nix @@ -0,0 +1 @@ +42 diff --git a/tvix/eval/src/tests/tvix_tests/identity-kv-attrs.nix b/tvix/eval/src/tests/tvix_tests/identity-kv-attrs.nix new file mode 100644 index 000000000000..f1398b8d0592 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/identity-kv-attrs.nix @@ -0,0 +1 @@ +{ name = "foo"; value = 12; } diff --git a/tvix/eval/src/tests/tvix_tests/identity-nested-attrs.nix b/tvix/eval/src/tests/tvix_tests/identity-nested-attrs.nix new file mode 100644 index 000000000000..6a139452ef28 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/identity-nested-attrs.nix @@ -0,0 +1 @@ +{ a = { b = null; }; } diff --git a/tvix/eval/src/tests/tvix_tests/identity-null.nix b/tvix/eval/src/tests/tvix_tests/identity-null.nix new file mode 100644 index 000000000000..19765bd501b6 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/identity-null.nix @@ -0,0 +1 @@ +null diff --git a/tvix/eval/src/tests/tvix_tests/identity-signed-float.nix b/tvix/eval/src/tests/tvix_tests/identity-signed-float.nix new file mode 100644 index 000000000000..50c9d06aa52c --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/identity-signed-float.nix @@ -0,0 +1 @@ +-4.2 diff --git a/tvix/eval/src/tests/tvix_tests/identity-signed-int.nix b/tvix/eval/src/tests/tvix_tests/identity-signed-int.nix new file mode 100644 index 000000000000..6a0e60d48b17 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/identity-signed-int.nix @@ -0,0 +1 @@ +-42 diff --git a/tvix/eval/src/tests/tvix_tests/identity-string.nix b/tvix/eval/src/tests/tvix_tests/identity-string.nix new file mode 100644 index 000000000000..d71ddbcf823f --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/identity-string.nix @@ -0,0 +1 @@ +"test string" diff --git a/tvix/eval/src/tests/tvix_tests/lib.nix b/tvix/eval/src/tests/tvix_tests/lib.nix new 
file mode 100644 index 000000000000..028a538314b7 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/lib.nix @@ -0,0 +1,61 @@ +with builtins; + +rec { + + fold = op: nul: list: + if list == [] + then nul + else op (head list) (fold op nul (tail list)); + + concat = + fold (x: y: x + y) ""; + + and = fold (x: y: x && y) true; + + flatten = x: + if isList x + then fold (x: y: (flatten x) ++ y) [] x + else [x]; + + sum = foldl' (x: y: add x y) 0; + + hasSuffix = ext: fileName: + let lenFileName = stringLength fileName; + lenExt = stringLength ext; + in !(lessThan lenFileName lenExt) && + substring (sub lenFileName lenExt) lenFileName fileName == ext; + + # Split a list at the given position. + splitAt = pos: list: + if pos == 0 then {first = []; second = list;} else + if list == [] then {first = []; second = [];} else + let res = splitAt (sub pos 1) (tail list); + in {first = [(head list)] ++ res.first; second = res.second;}; + + # Stable merge sort. + sortBy = comp: list: + if lessThan 1 (length list) + then + let + split = splitAt (div (length list) 2) list; + first = sortBy comp split.first; + second = sortBy comp split.second; + in mergeLists comp first second + else list; + + mergeLists = comp: list1: list2: + if list1 == [] then list2 else + if list2 == [] then list1 else + if comp (head list2) (head list1) then [(head list2)] ++ mergeLists comp list1 (tail list2) else + [(head list1)] ++ mergeLists comp (tail list1) list2; + + id = x: x; + + const = x: y: x; + + range = first: last: + if first > last + then [] + else genList (n: first + n) (last - first + 1); + +} diff --git a/tvix/eval/src/tests/tvix_tests/notyetpassing/eval-okay-minimal-2.3-builtins.exp b/tvix/eval/src/tests/tvix_tests/notyetpassing/eval-okay-minimal-2.3-builtins.exp new file mode 100644 index 000000000000..967fc858bc3d --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/notyetpassing/eval-okay-minimal-2.3-builtins.exp @@ -0,0 +1 @@ +[ "abort" "add" "addErrorContext" "all" "any" "appendContext" "attrNames" "attrValues" "baseNameOf" "bitAnd" "bitOr" "bitXor" "builtins" "catAttrs" "compareVersions" "concatLists" "concatMap" "concatStringsSep" "currentSystem" "currentTime" "deepSeq" "derivation" "derivationStrict" "dirOf" "div" "elem" "elemAt" "false" "fetchGit" "fetchMercurial" "fetchTarball" "fetchurl" "filter" "filterSource" "findFile" "foldl'" "fromJSON" "fromTOML" "functionArgs" "genList" "genericClosure" "getAttr" "getContext" "getEnv" "hasAttr" "hasContext" "hashFile" "hashString" "head" "import" "intersectAttrs" "isAttrs" "isBool" "isFloat" "isFunction" "isInt" "isList" "isNull" "isPath" "isString" "langVersion" "length" "lessThan" "listToAttrs" "map" "mapAttrs" "match" "mul" "nixPath" "nixVersion" "null" "parseDrvName" "partition" "path" "pathExists" "placeholder" "readDir" "readFile" "removeAttrs" "replaceStrings" "scopedImport" "seq" "sort" "split" "splitVersion" "storeDir" "storePath" "stringLength" "sub" "substring" "tail" "throw" "toFile" "toJSON" "toPath" "toString" "toXML" "trace" "true" "tryEval" "typeOf" "unsafeDiscardOutputDependency" "unsafeDiscardStringContext" "unsafeGetAttrPos" ] diff --git a/tvix/eval/src/tests/tvix_tests/notyetpassing/eval-okay-minimal-2.3-builtins.nix b/tvix/eval/src/tests/tvix_tests/notyetpassing/eval-okay-minimal-2.3-builtins.nix new file mode 100644 index 000000000000..804355f34d10 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/notyetpassing/eval-okay-minimal-2.3-builtins.nix @@ -0,0 +1,35 @@ +# This tests verifies that the Nix implementation evaluating this has at least 
+# all the builtins given in `minimalBuiltins`. We don't test a precise list of +# builtins since we accept that there will always be difference between the +# builtins sets of Tvix, C++ Nix 2.3 and newer C++ Nix versions, as new builtins +# are added. +# +# Tvix also may choose never to implement some builtins if they are only useful +# for flakes or perform well enough via the shims nixpkgs usually provides. + +let + # C++ Nix 2.3 builtins except valueSize which is removed in later versions + minimalBuiltins = [ + "abort" "add" "addErrorContext" "all" "any" "appendContext" "attrNames" + "attrValues" "baseNameOf" "bitAnd" "bitOr" "bitXor" "builtins" "catAttrs" + "compareVersions" "concatLists" "concatMap" "concatStringsSep" + "currentSystem" "currentTime" "deepSeq" "derivation" "derivationStrict" + "dirOf" "div" "elem" "elemAt" "false" "fetchGit" "fetchMercurial" + "fetchTarball" "fetchurl" "filter" "filterSource" "findFile" "foldl'" + "fromJSON" "fromTOML" "functionArgs" "genList" "genericClosure" "getAttr" + "getContext" "getEnv" "hasAttr" "hasContext" "hashFile" "hashString" "head" + "import" "intersectAttrs" "isAttrs" "isBool" "isFloat" "isFunction" "isInt" + "isList" "isNull" "isPath" "isString" "langVersion" "length" "lessThan" + "listToAttrs" "map" "mapAttrs" "match" "mul" "nixPath" "nixVersion" "null" + "parseDrvName" "partition" "path" "pathExists" "placeholder" "readDir" + "readFile" "removeAttrs" "replaceStrings" "scopedImport" "seq" "sort" + "split" "splitVersion" "storeDir" "storePath" "stringLength" "sub" + "substring" "tail" "throw" "toFile" "toJSON" "toPath" "toString" "toXML" + "trace" "true" "tryEval" "typeOf" "unsafeDiscardOutputDependency" + "unsafeDiscardStringContext" "unsafeGetAttrPos" + ]; + + intersectLists = as: bs: builtins.filter (a: builtins.elem a bs) as; +in + +intersectLists minimalBuiltins (builtins.attrNames builtins) diff --git a/tvix/eval/src/tests/tvix_tests/readDir/bar b/tvix/eval/src/tests/tvix_tests/readDir/bar new file mode 100644 index 000000000000..e69de29bb2d1 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/readDir/bar diff --git a/tvix/eval/src/tests/tvix_tests/readDir/foo/.keep b/tvix/eval/src/tests/tvix_tests/readDir/foo/.keep new file mode 100644 index 000000000000..e69de29bb2d1 --- /dev/null +++ b/tvix/eval/src/tests/tvix_tests/readDir/foo/.keep diff --git a/tvix/eval/src/upvalues.rs b/tvix/eval/src/upvalues.rs new file mode 100644 index 000000000000..687d6850ccfa --- /dev/null +++ b/tvix/eval/src/upvalues.rs @@ -0,0 +1,86 @@ +//! This module encapsulates some logic for upvalue handling, which is +//! relevant to both thunks (delayed computations for lazy-evaluation) +//! as well as closures (lambdas that capture variables from the +//! surrounding scope). +//! +//! The upvalues of a scope are whatever data are needed at runtime +//! in order to resolve each free variable in the scope to a value. +//! "Upvalue" is a term taken from Lua. + +use std::ops::Index; + +use crate::{opcode::UpvalueIdx, Value}; + +/// Structure for carrying upvalues of an UpvalueCarrier. The +/// implementation of this struct encapsulates the logic for +/// capturing and accessing upvalues. +/// +/// Nix's `with` cannot be used to shadow an enclosing binding -- +/// like Rust's `use xyz::*` construct, but unlike Javascript's +/// `with (xyz)`. 
This means that Nix has two kinds of identifiers, +/// which can be distinguished at compile time: +/// +/// - Static identifiers, which are bound in some enclosing scope by +/// `let`, `name:` or `{name}:` +/// - Dynamic identifiers, which are not bound in any enclosing +/// scope +#[derive(Clone, Debug)] +pub struct Upvalues { + /// The upvalues of static identifiers. Each static identifier + /// is assigned an integer identifier at compile time, which is + /// an index into this Vec. + static_upvalues: Vec<Value>, + + /// The upvalues of dynamic identifiers, if any exist. This + /// consists of the value passed to each enclosing `with val;`, + /// from outermost to innermost. + with_stack: Option<Vec<Value>>, +} + +impl Upvalues { + pub fn with_capacity(count: usize) -> Self { + Upvalues { + static_upvalues: Vec::with_capacity(count), + with_stack: None, + } + } + + /// Push an upvalue at the end of the upvalue list. + pub fn push(&mut self, value: Value) { + self.static_upvalues.push(value); + } + + /// Set the captured with stack. + pub fn set_with_stack(&mut self, with_stack: Vec<Value>) { + self.with_stack = Some(with_stack); + } + + pub fn with_stack(&self) -> Option<&Vec<Value>> { + self.with_stack.as_ref() + } + + pub fn with_stack_len(&self) -> usize { + match &self.with_stack { + None => 0, + Some(stack) => stack.len(), + } + } + + /// Resolve deferred upvalues from the provided stack slice, + /// mutating them in the internal upvalue slots. + pub fn resolve_deferred_upvalues(&mut self, stack: &[Value]) { + for upvalue in self.static_upvalues.iter_mut() { + if let Value::DeferredUpvalue(update_from_idx) = upvalue { + *upvalue = stack[update_from_idx.0].clone(); + } + } + } +} + +impl Index<UpvalueIdx> for Upvalues { + type Output = Value; + + fn index(&self, index: UpvalueIdx) -> &Self::Output { + &self.static_upvalues[index.0] + } +} diff --git a/tvix/eval/src/value/arbitrary.rs b/tvix/eval/src/value/arbitrary.rs new file mode 100644 index 000000000000..bf53f4fcb28a --- /dev/null +++ b/tvix/eval/src/value/arbitrary.rs @@ -0,0 +1,106 @@ +//! 
Support for configurable generation of arbitrary nix values + +use imbl::proptest::{ord_map, vector}; +use proptest::{prelude::*, strategy::BoxedStrategy}; +use std::ffi::OsString; + +use super::{attrs::AttrsRep, NixAttrs, NixList, NixString, Value}; + +#[derive(Clone)] +pub enum Parameters { + Strategy(BoxedStrategy<Value>), + Parameters { + generate_internal_values: bool, + generate_functions: bool, + generate_nested: bool, + }, +} + +impl Default for Parameters { + fn default() -> Self { + Self::Parameters { + generate_internal_values: false, + generate_functions: false, + generate_nested: true, + } + } +} + +impl Arbitrary for NixAttrs { + type Parameters = Parameters; + type Strategy = BoxedStrategy<Self>; + + fn arbitrary_with(args: Self::Parameters) -> Self::Strategy { + prop_oneof![ + // Empty attrs representation + Just(Self(AttrsRep::Empty)), + // KV representation (name/value pairs) + ( + any_with::<Value>(args.clone()), + any_with::<Value>(args.clone()) + ) + .prop_map(|(name, value)| Self(AttrsRep::KV { name, value })), + // Map representation + ord_map(NixString::arbitrary(), Value::arbitrary_with(args), 0..100) + .prop_map(|map| Self(AttrsRep::Im(map))) + ] + .boxed() + } +} + +impl Arbitrary for NixList { + type Parameters = <Value as Arbitrary>::Parameters; + type Strategy = BoxedStrategy<Self>; + + fn arbitrary_with(args: Self::Parameters) -> Self::Strategy { + vector(<Value as Arbitrary>::arbitrary_with(args), 0..100) + .prop_map(NixList::from) + .boxed() + } +} + +impl Arbitrary for Value { + type Parameters = Parameters; + type Strategy = BoxedStrategy<Self>; + + fn arbitrary_with(args: Self::Parameters) -> Self::Strategy { + match args { + Parameters::Strategy(s) => s, + Parameters::Parameters { + generate_internal_values, + generate_functions, + generate_nested, + } => { + if generate_internal_values || generate_functions { + todo!("Generating internal values and functions not implemented yet") + } else if generate_nested { + non_internal_value().boxed() + } else { + leaf_value().boxed() + } + } + } + } +} + +fn leaf_value() -> impl Strategy<Value = Value> { + use Value::*; + + prop_oneof![ + Just(Null), + any::<bool>().prop_map(Bool), + any::<i64>().prop_map(Integer), + any::<f64>().prop_map(Float), + any::<NixString>().prop_map(String), + any::<OsString>().prop_map(|s| Path(Box::new(s.into()))), + ] +} + +fn non_internal_value() -> impl Strategy<Value = Value> { + leaf_value().prop_recursive(3, 5, 5, |inner| { + prop_oneof![ + NixAttrs::arbitrary_with(Parameters::Strategy(inner.clone())).prop_map(Value::attrs), + any_with::<NixList>(Parameters::Strategy(inner)).prop_map(Value::List) + ] + }) +} diff --git a/tvix/eval/src/value/attrs.rs b/tvix/eval/src/value/attrs.rs new file mode 100644 index 000000000000..bacfd22217bd --- /dev/null +++ b/tvix/eval/src/value/attrs.rs @@ -0,0 +1,609 @@ +//! This module implements Nix attribute sets. They have flexible +//! backing implementations, as they are used in very versatile +//! use-cases that are all exposed the same way in the language +//! surface. +//! +//! Due to this, construction and management of attribute sets has +//! some peculiarities that are encapsulated within this module. 
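// Editor's note, not part of the commit: a minimal sketch of how the
// construction path described above is expected to pick a backing
// representation, mirroring the unit tests included further down in this
// diff. It assumes it runs inside this crate, where `NixAttrs` and `Value`
// are in scope.
//
//     use crate::value::{NixAttrs, Value};
//
//     // Zero entries use the optimised empty representation.
//     let empty = NixAttrs::construct(0, vec![]).unwrap();
//     assert!(empty.is_empty());
//
//     // Exactly the keys "name" and "value" use the optimised KV
//     // representation; the slice layout matches the VM stack.
//     let kv = NixAttrs::construct(
//         2,
//         vec![
//             Value::String("value".into()),
//             Value::Integer(42),
//             Value::String("name".into()),
//             Value::String("meaning".into()),
//         ],
//     )
//     .unwrap();
//     assert_eq!(kv.len(), 2);
//
//     // Everything else falls back to the persistent ordered map.
//     let attrs = NixAttrs::construct(
//         1,
//         vec![Value::String("key".into()), Value::String("value".into())],
//     )
//     .unwrap();
//     assert!(attrs.select("key").is_some());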
+use std::iter::FromIterator; + +use imbl::{ordmap, OrdMap}; +use lazy_static::lazy_static; +use serde::de::{Deserializer, Error, Visitor}; +use serde::Deserialize; + +use super::string::NixString; +use super::thunk::ThunkSet; +use super::CoercionKind; +use super::TotalDisplay; +use super::Value; +use crate::errors::ErrorKind; +use crate::generators::{self, GenCo}; + +lazy_static! { + static ref NAME_S: NixString = "name".into(); + static ref NAME_REF: &'static NixString = &NAME_S; + static ref VALUE_S: NixString = "value".into(); + static ref VALUE_REF: &'static NixString = &VALUE_S; +} + +#[cfg(test)] +mod tests; + +#[derive(Clone, Debug, Deserialize)] +pub(super) enum AttrsRep { + Empty, + + Im(OrdMap<NixString, Value>), + + /// Warning: this represents a **two**-attribute attrset, with + /// attribute names "name" and "value", like `{name="foo"; + /// value="bar";}`, *not* `{foo="bar";}`! + KV { + name: Value, + value: Value, + }, +} + +impl Default for AttrsRep { + fn default() -> Self { + AttrsRep::Empty + } +} + +impl AttrsRep { + /// Retrieve reference to a mutable map inside of an attrs, + /// optionally changing the representation if required. + fn map_mut(&mut self) -> &mut OrdMap<NixString, Value> { + match self { + AttrsRep::Im(m) => m, + + AttrsRep::Empty => { + *self = AttrsRep::Im(OrdMap::new()); + self.map_mut() + } + + AttrsRep::KV { name, value } => { + *self = AttrsRep::Im(ordmap! { + NAME_S.clone() => name.clone(), + VALUE_S.clone() => value.clone() + }); + + self.map_mut() + } + } + } + + fn select(&self, key: &str) -> Option<&Value> { + match self { + AttrsRep::Empty => None, + + AttrsRep::KV { name, value } => match key { + "name" => Some(name), + "value" => Some(value), + _ => None, + }, + + AttrsRep::Im(map) => map.get(&key.into()), + } + } + + fn contains(&self, key: &str) -> bool { + match self { + AttrsRep::Empty => false, + AttrsRep::KV { .. } => key == "name" || key == "value", + AttrsRep::Im(map) => map.contains_key(&key.into()), + } + } +} + +#[repr(transparent)] +#[derive(Clone, Debug, Default)] +pub struct NixAttrs(pub(super) AttrsRep); + +impl From<OrdMap<NixString, Value>> for NixAttrs { + fn from(map: OrdMap<NixString, Value>) -> Self { + NixAttrs(AttrsRep::Im(map)) + } +} + +impl<K, V> FromIterator<(K, V)> for NixAttrs +where + NixString: From<K>, + Value: From<V>, +{ + fn from_iter<T>(iter: T) -> NixAttrs + where + T: IntoIterator<Item = (K, V)>, + { + NixAttrs(AttrsRep::Im(iter.into_iter().collect())) + } +} + +impl TotalDisplay for NixAttrs { + fn total_fmt(&self, f: &mut std::fmt::Formatter<'_>, set: &mut ThunkSet) -> std::fmt::Result { + f.write_str("{ ")?; + + match &self.0 { + AttrsRep::KV { name, value } => { + f.write_str("name = ")?; + name.total_fmt(f, set)?; + f.write_str("; ")?; + + f.write_str("value = ")?; + value.total_fmt(f, set)?; + f.write_str("; ")?; + } + + AttrsRep::Im(map) => { + for (name, value) in map { + write!(f, "{} = ", name.ident_str())?; + value.total_fmt(f, set)?; + f.write_str("; ")?; + } + } + + AttrsRep::Empty => { /* no values to print! 
*/ } + } + + f.write_str("}") + } +} + +impl<'de> Deserialize<'de> for NixAttrs { + fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> + where + D: Deserializer<'de>, + { + struct MapVisitor; + + impl<'de> Visitor<'de> for MapVisitor { + type Value = NixAttrs; + + fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result { + formatter.write_str("a valid Nix attribute set") + } + + fn visit_map<A>(self, mut map: A) -> Result<Self::Value, A::Error> + where + A: serde::de::MapAccess<'de>, + { + let mut stack_array = Vec::with_capacity(map.size_hint().unwrap_or(0) * 2); + + while let Some((key, value)) = map.next_entry()? { + stack_array.push(key); + stack_array.push(value); + } + + NixAttrs::construct(stack_array.len() / 2, stack_array).map_err(A::Error::custom) + } + } + + deserializer.deserialize_map(MapVisitor) + } +} + +impl NixAttrs { + pub fn empty() -> Self { + Self(AttrsRep::Empty) + } + + /// Compare two attribute sets by pointer equality. Only makes + /// sense for some attribute set reprsentations, i.e. returning + /// `false` does not mean that the two attribute sets do not have + /// equal *content*. + pub fn ptr_eq(&self, other: &Self) -> bool { + match (&self.0, &other.0) { + (AttrsRep::Im(lhs), AttrsRep::Im(rhs)) => lhs.ptr_eq(rhs), + _ => false, + } + } + + /// Return an attribute set containing the merge of the two + /// provided sets. Keys from the `other` set have precedence. + pub fn update(self, other: Self) -> Self { + // Short-circuit on some optimal cases: + match (&self.0, &other.0) { + (AttrsRep::Empty, AttrsRep::Empty) => return self, + (AttrsRep::Empty, _) => return other, + (_, AttrsRep::Empty) => return self, + (AttrsRep::KV { .. }, AttrsRep::KV { .. }) => return other, + + // Explicitly handle all branches instead of falling + // through, to ensure that we get at least some compiler + // errors if variants are modified. + (AttrsRep::Im(_), AttrsRep::Im(_)) + | (AttrsRep::Im(_), AttrsRep::KV { .. }) + | (AttrsRep::KV { .. }, AttrsRep::Im(_)) => {} + }; + + // Slightly more advanced, but still optimised updates + match (self.0, other.0) { + (AttrsRep::Im(mut m), AttrsRep::KV { name, value }) => { + m.insert(NAME_S.clone(), name); + m.insert(VALUE_S.clone(), value); + NixAttrs(AttrsRep::Im(m)) + } + + (AttrsRep::KV { name, value }, AttrsRep::Im(mut m)) => { + match m.entry(NAME_S.clone()) { + imbl::ordmap::Entry::Vacant(e) => { + e.insert(name); + } + + imbl::ordmap::Entry::Occupied(_) => { /* name from `m` has precedence */ } + }; + + match m.entry(VALUE_S.clone()) { + imbl::ordmap::Entry::Vacant(e) => { + e.insert(value); + } + + imbl::ordmap::Entry::Occupied(_) => { /* value from `m` has precedence */ } + }; + + NixAttrs(AttrsRep::Im(m)) + } + + // Plain merge of maps. + (AttrsRep::Im(m1), AttrsRep::Im(m2)) => NixAttrs(AttrsRep::Im(m2.union(m1))), + + // Cases handled above by the borrowing match: + _ => unreachable!(), + } + } + + /// Return the number of key-value entries in an attrset. + pub fn len(&self) -> usize { + match &self.0 { + AttrsRep::Im(map) => map.len(), + AttrsRep::Empty => 0, + AttrsRep::KV { .. } => 2, + } + } + + pub fn is_empty(&self) -> bool { + match &self.0 { + AttrsRep::Im(map) => map.is_empty(), + AttrsRep::Empty => true, + AttrsRep::KV { .. } => false, + } + } + + /// Select a value from an attribute set by key. 
+ pub fn select(&self, key: &str) -> Option<&Value> { + self.0.select(key) + } + + /// Select a required value from an attribute set by key, return + /// an `AttributeNotFound` error if it is missing. + pub fn select_required(&self, key: &str) -> Result<&Value, ErrorKind> { + self.select(key) + .ok_or_else(|| ErrorKind::AttributeNotFound { name: key.into() }) + } + + pub fn contains(&self, key: &str) -> bool { + self.0.contains(key) + } + + /// Construct an iterator over all the key-value pairs in the attribute set. + #[allow(clippy::needless_lifetimes)] + pub fn iter<'a>(&'a self) -> Iter<KeyValue<'a>> { + Iter(match &self.0 { + AttrsRep::Im(map) => KeyValue::Im(map.iter()), + AttrsRep::Empty => KeyValue::Empty, + + AttrsRep::KV { + ref name, + ref value, + } => KeyValue::KV { + name, + value, + at: IterKV::default(), + }, + }) + } + + pub fn into_iter(self) -> IntoIter { + match self.0 { + AttrsRep::Empty => IntoIter(IntoIterRepr::Empty), + AttrsRep::KV { name, value } => IntoIter(IntoIterRepr::Finite( + vec![(NAME_REF.clone(), name), (VALUE_REF.clone(), value)].into_iter(), + )), + AttrsRep::Im(map) => IntoIter(IntoIterRepr::Im(map.into_iter())), + } + } + + /// Same as into_iter(), but marks call sites which rely on the + /// iteration being lexicographic. + pub fn into_iter_sorted(self) -> IntoIter { + self.into_iter() + } + + /// Construct an iterator over all the keys of the attribute set + pub fn keys(&self) -> Keys { + Keys(match &self.0 { + AttrsRep::Empty => KeysInner::Empty, + AttrsRep::Im(m) => KeysInner::Im(m.keys()), + AttrsRep::KV { .. } => KeysInner::KV(IterKV::default()), + }) + } + + /// Implement construction logic of an attribute set, to encapsulate + /// logic about attribute set optimisations inside of this module. + pub fn construct(count: usize, mut stack_slice: Vec<Value>) -> Result<Self, ErrorKind> { + debug_assert!( + stack_slice.len() == count * 2, + "construct_attrs called with count == {}, but slice.len() == {}", + count, + stack_slice.len(), + ); + + // Optimisation: Empty attribute set + if count == 0 { + return Ok(NixAttrs(AttrsRep::Empty)); + } + + // Optimisation: KV pattern + if count == 2 { + if let Some(kv) = attempt_optimise_kv(&mut stack_slice) { + return Ok(kv); + } + } + + let mut attrs = NixAttrs(AttrsRep::Im(OrdMap::new())); + + for _ in 0..count { + let value = stack_slice.pop().unwrap(); + let key = stack_slice.pop().unwrap(); + + match key { + Value::String(ks) => set_attr(&mut attrs, ks, value)?, + + Value::Null => { + // This is in fact valid, but leads to the value + // being ignored and nothing being set, i.e. `{ + // ${null} = 1; } => { }`. + continue; + } + + other => return Err(ErrorKind::InvalidAttributeName(other)), + } + } + + Ok(attrs) + } + + /// Construct an optimized "KV"-style attribute set given the value for the + /// `"name"` key, and the value for the `"value"` key + pub(crate) fn from_kv(name: Value, value: Value) -> Self { + NixAttrs(AttrsRep::KV { name, value }) + } + + /// Attempt to coerce an attribute set with a `__toString` + /// attribute to a string. + pub(crate) async fn try_to_string(&self, co: &GenCo, kind: CoercionKind) -> Option<NixString> { + if let Some(to_string) = self.select("__toString") { + let callable = generators::request_force(&co, to_string.clone()).await; + + // Leave the attribute set on the stack as an argument + // to the function call. + generators::request_stack_push(&co, Value::Attrs(Box::new(self.clone()))).await; + + // Call the callable ... 
+ let result = generators::request_call(&co, callable).await; + + // Recurse on the result, as attribute set coercion + // actually works recursively, e.g. you can even return + // /another/ set with a __toString attr. + let s = generators::request_string_coerce(&co, result, kind).await; + + return Some(s); + } + + None + } +} + +/// In Nix, name/value attribute pairs are frequently constructed from +/// literals. This particular case should avoid allocation of a map, +/// additional heap values etc. and use the optimised `KV` variant +/// instead. +/// +/// ```norust +/// `slice` is the top of the stack from which the attrset is being +/// constructed, e.g. +/// +/// slice: [ "value" 5 "name" "foo" ] +/// index: 0 1 2 3 +/// stack: 3 2 1 0 +/// ``` +fn attempt_optimise_kv(slice: &mut [Value]) -> Option<NixAttrs> { + let (name_idx, value_idx) = { + match (&slice[2], &slice[0]) { + (Value::String(s1), Value::String(s2)) if (*s1 == *NAME_S && *s2 == *VALUE_S) => (3, 1), + + (Value::String(s1), Value::String(s2)) if (*s1 == *VALUE_S && *s2 == *NAME_S) => (1, 3), + + // Technically this branch lets type errors pass, + // but they will be caught during normal attribute + // set construction instead. + _ => return None, + } + }; + + Some(NixAttrs::from_kv( + slice[name_idx].clone(), + slice[value_idx].clone(), + )) +} + +/// Set an attribute on an in-construction attribute set, while +/// checking against duplicate keys. +fn set_attr(attrs: &mut NixAttrs, key: NixString, value: Value) -> Result<(), ErrorKind> { + match attrs.0.map_mut().entry(key) { + imbl::ordmap::Entry::Occupied(entry) => Err(ErrorKind::DuplicateAttrsKey { + key: entry.key().as_str().to_string(), + }), + + imbl::ordmap::Entry::Vacant(entry) => { + entry.insert(value); + Ok(()) + } + } +} + +/// Internal helper type to track the iteration status of an iterator +/// over the name/value representation. +#[derive(Debug, Default)] +pub enum IterKV { + #[default] + Name, + Value, + Done, +} + +impl IterKV { + fn next(&mut self) { + match *self { + Self::Name => *self = Self::Value, + Self::Value => *self = Self::Done, + Self::Done => {} + } + } +} + +/// Iterator representation over the keys *and* values of an attribute +/// set. +pub enum KeyValue<'a> { + Empty, + + KV { + name: &'a Value, + value: &'a Value, + at: IterKV, + }, + + Im(imbl::ordmap::Iter<'a, NixString, Value>), +} + +/// Iterator over a Nix attribute set. +// This wrapper type exists to make the inner "raw" iterator +// inaccessible. +#[repr(transparent)] +pub struct Iter<T>(T); + +impl<'a> Iterator for Iter<KeyValue<'a>> { + type Item = (&'a NixString, &'a Value); + + fn next(&mut self) -> Option<Self::Item> { + match &mut self.0 { + KeyValue::Im(inner) => inner.next(), + KeyValue::Empty => None, + + KeyValue::KV { name, value, at } => match at { + IterKV::Name => { + at.next(); + Some((&NAME_REF, name)) + } + + IterKV::Value => { + at.next(); + Some((&VALUE_REF, value)) + } + + IterKV::Done => None, + }, + } + } +} + +impl<'a> ExactSizeIterator for Iter<KeyValue<'a>> { + fn len(&self) -> usize { + match &self.0 { + KeyValue::Empty => 0, + KeyValue::KV { .. 
} => 2, + KeyValue::Im(inner) => inner.len(), + } + } +} + +enum KeysInner<'a> { + Empty, + KV(IterKV), + Im(imbl::ordmap::Keys<'a, NixString, Value>), +} + +pub struct Keys<'a>(KeysInner<'a>); + +impl<'a> Iterator for Keys<'a> { + type Item = &'a NixString; + + fn next(&mut self) -> Option<Self::Item> { + match &mut self.0 { + KeysInner::Empty => None, + KeysInner::KV(at @ IterKV::Name) => { + at.next(); + Some(&NAME_REF) + } + KeysInner::KV(at @ IterKV::Value) => { + at.next(); + Some(&VALUE_REF) + } + KeysInner::KV(IterKV::Done) => None, + KeysInner::Im(m) => m.next(), + } + } +} + +impl<'a> IntoIterator for &'a NixAttrs { + type Item = (&'a NixString, &'a Value); + + type IntoIter = Iter<KeyValue<'a>>; + + fn into_iter(self) -> Self::IntoIter { + self.iter() + } +} + +impl<'a> ExactSizeIterator for Keys<'a> { + fn len(&self) -> usize { + match &self.0 { + KeysInner::Empty => 0, + KeysInner::KV(_) => 2, + KeysInner::Im(m) => m.len(), + } + } +} + +/// Internal representation of an owning attrset iterator +pub enum IntoIterRepr { + Empty, + Finite(std::vec::IntoIter<(NixString, Value)>), + Im(imbl::ordmap::ConsumingIter<(NixString, Value)>), +} + +#[repr(transparent)] +pub struct IntoIter(IntoIterRepr); + +impl Iterator for IntoIter { + type Item = (NixString, Value); + + fn next(&mut self) -> Option<Self::Item> { + match &mut self.0 { + IntoIterRepr::Empty => None, + IntoIterRepr::Finite(inner) => inner.next(), + IntoIterRepr::Im(inner) => inner.next(), + } + } +} + +impl ExactSizeIterator for IntoIter { + fn len(&self) -> usize { + match &self.0 { + IntoIterRepr::Empty => 0, + IntoIterRepr::Finite(inner) => inner.len(), + IntoIterRepr::Im(inner) => inner.len(), + } + } +} diff --git a/tvix/eval/src/value/attrs/tests.rs b/tvix/eval/src/value/attrs/tests.rs new file mode 100644 index 000000000000..473592c519df --- /dev/null +++ b/tvix/eval/src/value/attrs/tests.rs @@ -0,0 +1,108 @@ +use super::*; + +#[test] +fn test_empty_attrs() { + let attrs = NixAttrs::construct(0, vec![]).expect("empty attr construction should succeed"); + + assert!( + matches!(attrs, NixAttrs(AttrsRep::Empty)), + "empty attribute set should use optimised representation" + ); +} + +#[test] +fn test_simple_attrs() { + let attrs = NixAttrs::construct( + 1, + vec![Value::String("key".into()), Value::String("value".into())], + ) + .expect("simple attr construction should succeed"); + + assert!( + matches!(attrs, NixAttrs(AttrsRep::Im(_))), + "simple attribute set should use map representation", + ) +} + +#[test] +fn test_kv_attrs() { + let name_val = Value::String("name".into()); + let value_val = Value::String("value".into()); + let meaning_val = Value::String("meaning".into()); + let forty_two_val = Value::Integer(42); + + let kv_attrs = NixAttrs::construct( + 2, + vec![ + value_val, + forty_two_val.clone(), + name_val, + meaning_val.clone(), + ], + ) + .expect("constructing K/V pair attrs should succeed"); + + match kv_attrs { + NixAttrs(AttrsRep::KV { name, value }) + if name.to_str().unwrap() == meaning_val.to_str().unwrap() + || value.to_str().unwrap() == forty_two_val.to_str().unwrap() => {} + + _ => panic!( + "K/V attribute set should use optimised representation, but got {:?}", + kv_attrs + ), + } +} + +#[test] +fn test_empty_attrs_iter() { + let attrs = NixAttrs::construct(0, vec![]).unwrap(); + assert!(attrs.iter().next().is_none()); +} + +#[test] +fn test_kv_attrs_iter() { + let name_val = Value::String("name".into()); + let value_val = Value::String("value".into()); + let meaning_val = 
Value::String("meaning".into()); + let forty_two_val = Value::Integer(42); + + let kv_attrs = NixAttrs::construct( + 2, + vec![ + value_val, + forty_two_val.clone(), + name_val, + meaning_val.clone(), + ], + ) + .expect("constructing K/V pair attrs should succeed"); + + let mut iter = kv_attrs + .iter() + .collect::<Vec<_>>() + .into_iter() + .map(|(k, v)| (k, v)); + let (k, v) = iter.next().unwrap(); + assert!(k == *NAME_REF); + assert!(v.to_str().unwrap() == meaning_val.to_str().unwrap()); + let (k, v) = iter.next().unwrap(); + assert!(k == *VALUE_REF); + assert!(v.as_int().unwrap() == forty_two_val.as_int().unwrap()); + assert!(iter.next().is_none()); +} + +#[test] +fn test_map_attrs_iter() { + let attrs = NixAttrs::construct( + 1, + vec![Value::String("key".into()), Value::String("value".into())], + ) + .expect("simple attr construction should succeed"); + + let mut iter = attrs.iter().collect::<Vec<_>>().into_iter(); + let (k, v) = iter.next().unwrap(); + assert!(k == &NixString::from("key")); + assert!(v.to_str().unwrap().as_str() == "value"); + assert!(iter.next().is_none()); +} diff --git a/tvix/eval/src/value/builtin.rs b/tvix/eval/src/value/builtin.rs new file mode 100644 index 000000000000..6d08ebf9506d --- /dev/null +++ b/tvix/eval/src/value/builtin.rs @@ -0,0 +1,136 @@ +//! This module implements the runtime representation of a Nix +//! builtin. +//! +//! Builtins are directly backed by Rust code operating on Nix values. + +use crate::vm::generators::Generator; + +use super::Value; + +use std::{ + fmt::{Debug, Display}, + rc::Rc, +}; + +/// Trait for closure types of builtins. +/// +/// Builtins are expected to yield a generator which can be run by the VM to +/// produce the final value. +/// +/// Implementors should use the builtins-macros to create these functions +/// instead of handling the argument-passing logic manually. +pub trait BuiltinGen: Fn(Vec<Value>) -> Generator {} +impl<F: Fn(Vec<Value>) -> Generator> BuiltinGen for F {} + +#[derive(Clone)] +pub struct BuiltinRepr { + name: &'static str, + /// Optional documentation for the builtin. + documentation: Option<&'static str>, + arg_count: usize, + + func: Rc<dyn BuiltinGen>, + + /// Partially applied function arguments. + partials: Vec<Value>, +} + +pub enum BuiltinResult { + /// Builtin was not ready to be called (arguments missing) and remains + /// partially applied. + Partial(Builtin), + + /// Builtin was called and constructed a generator that the VM must run. + Called(&'static str, Generator), +} + +/// Represents a single built-in function which directly executes Rust +/// code that operates on a Nix value. +/// +/// Builtins are the only functions in Nix that have varying arities +/// (for example, `hasAttr` has an arity of 2, but `isAttrs` an arity +/// of 1). To facilitate this generically, builtins expect to be +/// called with a vector of Nix values corresponding to their +/// arguments in order. +/// +/// Partially applied builtins act similar to closures in that they +/// "capture" the partially applied arguments, and are treated +/// specially when printing their representation etc. 
+#[derive(Clone)] +pub struct Builtin(Box<BuiltinRepr>); + +impl From<BuiltinRepr> for Builtin { + fn from(value: BuiltinRepr) -> Self { + Builtin(Box::new(value)) + } +} + +impl Builtin { + pub fn new<F: BuiltinGen + 'static>( + name: &'static str, + documentation: Option<&'static str>, + arg_count: usize, + func: F, + ) -> Self { + BuiltinRepr { + name, + documentation, + arg_count, + func: Rc::new(func), + partials: vec![], + } + .into() + } + + pub fn name(&self) -> &'static str { + self.0.name + } + + pub fn documentation(&self) -> Option<&'static str> { + self.0.documentation + } + + /// Apply an additional argument to the builtin. After this, [`call`] *must* + /// be called, otherwise it may leave the builtin in an incorrect state. + pub fn apply_arg(&mut self, arg: Value) { + self.0.partials.push(arg); + + debug_assert!( + self.0.partials.len() <= self.0.arg_count, + "Tvix bug: pushed too many arguments to builtin" + ); + } + + /// Attempt to call a builtin, which will produce a generator if it is fully + /// applied or return the builtin if it is partially applied. + pub fn call(self) -> BuiltinResult { + if self.0.partials.len() == self.0.arg_count { + BuiltinResult::Called(self.0.name, (self.0.func)(self.0.partials)) + } else { + BuiltinResult::Partial(self) + } + } +} + +impl Debug for Builtin { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "builtin[{}]", self.0.name) + } +} + +impl Display for Builtin { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + if !self.0.partials.is_empty() { + f.write_str("<<primop-app>>") + } else { + f.write_str("<<primop>>") + } + } +} + +/// Builtins are uniquely identified by their name +impl PartialEq for Builtin { + fn eq(&self, other: &Self) -> bool { + self.0.name == other.0.name + } +} diff --git a/tvix/eval/src/value/function.rs b/tvix/eval/src/value/function.rs new file mode 100644 index 000000000000..1a32795393bf --- /dev/null +++ b/tvix/eval/src/value/function.rs @@ -0,0 +1,108 @@ +//! This module implements the runtime representation of functions. +use std::{collections::HashMap, hash::Hash, rc::Rc}; + +use codemap::Span; +use smol_str::SmolStr; + +use crate::{chunk::Chunk, upvalues::Upvalues}; + +use super::NixString; + +#[derive(Clone, Debug, PartialEq)] +pub(crate) struct Formals { + /// Map from argument name, to whether that argument is required + pub(crate) arguments: HashMap<NixString, bool>, + + /// Do the formals of this function accept extra arguments + pub(crate) ellipsis: bool, + + /// The span of the formals themselves, to use to emit errors + pub(crate) span: Span, +} + +impl Formals { + /// Returns true if the given arg is a valid argument to these formals. + /// + /// This is true if it is either listed in the list of arguments, or the formals have an + /// ellipsis + pub(crate) fn contains<Q>(&self, arg: &Q) -> bool + where + Q: ?Sized + Hash + Eq, + NixString: std::borrow::Borrow<Q>, + { + self.ellipsis || self.arguments.contains_key(arg) + } +} + +/// The opcodes for a thunk or closure, plus the number of +/// non-executable opcodes which are allowed after an OpThunkClosure or +/// OpThunkSuspended referencing it. At runtime `Lambda` is usually wrapped +/// in `Rc` to avoid copying the `Chunk` it holds (which can be +/// quite large). +/// +/// In order to correctly reproduce cppnix's "pointer equality" +/// semantics it is important that we never clone a Lambda -- +/// use Rc<Lambda>::clone() instead. 
This struct deliberately +/// does not `derive(Clone)` in order to prevent this from being +/// done accidentally. +/// +#[derive(/* do not add Clone here */ Debug, Default)] +pub struct Lambda { + pub(crate) chunk: Chunk, + + /// Name of the function (equivalent to the name of the + /// identifier (e.g. a value in a let-expression or an attribute + /// set entry) it is located in). + pub(crate) name: Option<SmolStr>, + + /// Number of upvalues which the code in this Lambda closes + /// over, and which need to be initialised at + /// runtime. Information about the variables is emitted using + /// data-carrying opcodes (see [`OpCode::DataStackIdx`]). + pub(crate) upvalue_count: usize, + pub(crate) formals: Option<Formals>, +} + +impl Lambda { + pub fn chunk(&mut self) -> &mut Chunk { + &mut self.chunk + } +} + +/// +/// In order to correctly reproduce cppnix's "pointer equality" +/// semantics it is important that we never clone a Lambda -- +/// use Rc<Lambda>::clone() instead. This struct deliberately +/// does not `derive(Clone)` in order to prevent this from being +/// done accidentally. +/// +#[derive(/* do not add Clone here */ Debug)] +pub struct Closure { + pub lambda: Rc<Lambda>, + pub upvalues: Rc<Upvalues>, +} + +impl Closure { + pub fn new(lambda: Rc<Lambda>) -> Self { + Self::new_with_upvalues( + Rc::new(Upvalues::with_capacity(lambda.upvalue_count)), + lambda, + ) + } + + pub fn new_with_upvalues(upvalues: Rc<Upvalues>, lambda: Rc<Lambda>) -> Self { + Closure { upvalues, lambda } + } + + pub fn chunk(&self) -> &Chunk { + &self.lambda.chunk + } + + pub fn lambda(&self) -> Rc<Lambda> { + self.lambda.clone() + } + + pub fn upvalues(&self) -> Rc<Upvalues> { + self.upvalues.clone() + } +} diff --git a/tvix/eval/src/value/json.rs b/tvix/eval/src/value/json.rs new file mode 100644 index 000000000000..33e16ebffcd0 --- /dev/null +++ b/tvix/eval/src/value/json.rs @@ -0,0 +1,84 @@ +/// Implementation of Value serialisation *to* JSON. +/// +/// This can not be implemented through standard serde-derive methods, +/// as there is internal Nix logic that must happen within the +/// serialisation methods. +use super::{CoercionKind, Value}; +use crate::generators::{self, GenCo}; +use crate::ErrorKind; + +use serde_json::value::to_value; +use serde_json::Value as Json; +use serde_json::{Map, Number}; // name clash with *our* `Value` + +impl Value { + pub(crate) async fn to_json(self, co: &GenCo) -> Result<Json, ErrorKind> { + let self_forced = generators::request_force(co, self).await; + + let value = match self_forced { + Value::Null => Json::Null, + Value::Bool(b) => Json::Bool(b), + Value::Integer(i) => Json::Number(Number::from(i)), + Value::Float(f) => to_value(f)?, + Value::String(s) => Json::String(s.as_str().into()), + + Value::Path(p) => { + let imported = generators::request_path_import(co, *p).await; + Json::String(imported.to_string_lossy().to_string()) + } + + Value::List(l) => { + let mut out = vec![]; + + for val in l.into_iter() { + out.push(generators::request_to_json(co, val).await); + } + + Json::Array(out) + } + + Value::Attrs(attrs) => { + // Attribute sets with a callable `__toString` attribute + // serialise to the string-coerced version of the result of + // calling that. + if let Some(s) = attrs.try_to_string(&co, CoercionKind::Weak).await { + return Ok(Json::String(s.as_str().to_string())); + } + + // Attribute sets with an `outPath` attribute + // serialise to a JSON serialisation of that inner + // value (regardless of what it is!). 
+ if let Some(out_path) = attrs.select("outPath") { + return Ok(generators::request_to_json(co, out_path.clone()).await); + } + + let mut out = Map::with_capacity(attrs.len()); + for (name, value) in attrs.into_iter_sorted() { + out.insert( + name.as_str().to_string(), + generators::request_to_json(co, value).await, + ); + } + + Json::Object(out) + } + + val @ Value::Closure(_) + | val @ Value::Thunk(_) + | val @ Value::Builtin(_) + | val @ Value::AttrNotFound + | val @ Value::Blueprint(_) + | val @ Value::DeferredUpvalue(_) + | val @ Value::UnresolvedPath(_) + | val @ Value::Json(_) => return Err(ErrorKind::NotSerialisableToJson(val.type_of())), + }; + + Ok(value) + } + + /// Generator version of the above, which wraps responses in + /// Value::Json. + pub(crate) async fn to_json_generator(self, co: GenCo) -> Result<Value, ErrorKind> { + Ok(Value::Json(self.to_json(&co).await?)) + } +} diff --git a/tvix/eval/src/value/list.rs b/tvix/eval/src/value/list.rs new file mode 100644 index 000000000000..cfaefff82195 --- /dev/null +++ b/tvix/eval/src/value/list.rs @@ -0,0 +1,152 @@ +//! This module implements Nix lists. +use std::ops::Index; +use std::rc::Rc; + +use imbl::{vector, Vector}; + +use serde::Deserialize; + +use crate::generators; +use crate::generators::GenCo; +use crate::AddContext; +use crate::ErrorKind; + +use super::thunk::ThunkSet; +use super::TotalDisplay; +use super::Value; + +#[repr(transparent)] +#[derive(Clone, Debug, Deserialize)] +pub struct NixList(Rc<Vector<Value>>); + +impl TotalDisplay for NixList { + fn total_fmt(&self, f: &mut std::fmt::Formatter<'_>, set: &mut ThunkSet) -> std::fmt::Result { + f.write_str("[ ")?; + + for v in self { + v.total_fmt(f, set)?; + f.write_str(" ")?; + } + + f.write_str("]") + } +} + +impl From<Vector<Value>> for NixList { + fn from(vs: Vector<Value>) -> Self { + Self(Rc::new(vs)) + } +} + +impl NixList { + pub fn len(&self) -> usize { + self.0.len() + } + + pub fn get(&self, i: usize) -> Option<&Value> { + self.0.get(i) + } + + pub fn is_empty(&self) -> bool { + self.0.is_empty() + } + + pub fn construct(count: usize, stack_slice: Vec<Value>) -> Self { + debug_assert!( + count == stack_slice.len(), + "NixList::construct called with count == {}, but slice.len() == {}", + count, + stack_slice.len(), + ); + + NixList(Rc::new(Vector::from_iter(stack_slice.into_iter()))) + } + + pub fn iter(&self) -> vector::Iter<Value> { + self.0.iter() + } + + pub fn ptr_eq(&self, other: &Self) -> bool { + Rc::ptr_eq(&self.0, &other.0) + } + + pub fn into_inner(self) -> Vector<Value> { + Rc::try_unwrap(self.0).unwrap_or_else(|rc| (*rc).clone()) + } + + #[deprecated(note = "callers should avoid constructing from Vec")] + pub fn from_vec(vs: Vec<Value>) -> Self { + Self(Rc::new(Vector::from_iter(vs.into_iter()))) + } + + /// Asynchronous sorting algorithm in which the comparator can make use of + /// VM requests (required as `builtins.sort` uses comparators written in + /// Nix). + /// + /// This is a simple, optimised bubble sort implementation. The choice of + /// algorithm is constrained by the comparator in Nix not being able to + /// yield equality, and us being unable to use the standard library + /// implementation of sorting (which is a lot longer, but a lot more + /// efficient) here. + // TODO(amjoseph): Investigate potential impl in Nix code, or Tvix bytecode. 
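// Editor's note, not part of the commit: the same bubble-sort shape in plain
// synchronous Rust, to illustrate why a strict "less than" comparator (with
// no equality information) is enough and why the sort stays stable:
//
//     fn bubble_sort_by_lt<T>(mut data: Vec<T>, lt: impl Fn(&T, &T) -> bool) -> Vec<T> {
//         let mut len = data.len();
//         loop {
//             let mut new_len = 0;
//             for i in 1..len {
//                 // Only swap when strictly less; equal elements never move
//                 // past each other, which keeps the sort stable.
//                 if lt(&data[i], &data[i - 1]) {
//                     data.swap(i, i - 1);
//                     new_len = i;
//                 }
//             }
//             if new_len == 0 {
//                 break;
//             }
//             len = new_len;
//         }
//         data
//     }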
+ pub async fn sort_by(self, co: &GenCo, cmp: Value) -> Result<Self, ErrorKind> { + let mut len = self.len(); + let mut data = self.into_inner(); + + loop { + let mut new_len = 0; + for i in 1..len { + if generators::request_force( + co, + generators::request_call_with( + co, + cmp.clone(), + [data[i].clone(), data[i - 1].clone()], + ) + .await, + ) + .await + .as_bool() + .context("evaluating comparator in `builtins.sort`")? + { + data.swap(i, i - 1); + new_len = i; + } + } + + if new_len == 0 { + break; + } + + len = new_len; + } + + Ok(data.into()) + } +} + +impl IntoIterator for NixList { + type Item = Value; + type IntoIter = imbl::vector::ConsumingIter<Value>; + + fn into_iter(self) -> Self::IntoIter { + self.into_inner().into_iter() + } +} + +impl<'a> IntoIterator for &'a NixList { + type Item = &'a Value; + type IntoIter = imbl::vector::Iter<'a, Value>; + + fn into_iter(self) -> Self::IntoIter { + self.0.iter() + } +} + +impl Index<usize> for NixList { + type Output = Value; + + fn index(&self, index: usize) -> &Self::Output { + &self.0[index] + } +} diff --git a/tvix/eval/src/value/mod.rs b/tvix/eval/src/value/mod.rs new file mode 100644 index 000000000000..77d40b0e3906 --- /dev/null +++ b/tvix/eval/src/value/mod.rs @@ -0,0 +1,842 @@ +//! This module implements the backing representation of runtime +//! values in the Nix language. +use std::cmp::Ordering; +use std::fmt::Display; +use std::num::{NonZeroI32, NonZeroUsize}; +use std::path::PathBuf; +use std::rc::Rc; + +use lexical_core::format::CXX_LITERAL; +use serde::Deserialize; + +#[cfg(feature = "arbitrary")] +mod arbitrary; +mod attrs; +mod builtin; +mod function; +mod json; +mod list; +mod path; +mod string; +mod thunk; + +use crate::errors::ErrorKind; +use crate::opcode::StackIdx; +use crate::spans::LightSpan; +use crate::vm::generators::{self, GenCo}; +use crate::AddContext; +pub use attrs::NixAttrs; +pub use builtin::{Builtin, BuiltinResult}; +pub(crate) use function::Formals; +pub use function::{Closure, Lambda}; +pub use list::NixList; +pub use path::canon_path; +pub use string::NixString; +pub use thunk::Thunk; + +pub use self::thunk::{SharedThunkSet, ThunkSet}; + +use lazy_static::lazy_static; + +#[warn(variant_size_differences)] +#[derive(Clone, Debug, Deserialize)] +#[serde(untagged)] +pub enum Value { + Null, + Bool(bool), + Integer(i64), + Float(f64), + String(NixString), + + #[serde(skip)] + Path(Box<PathBuf>), + Attrs(Box<NixAttrs>), + List(NixList), + + #[serde(skip)] + Closure(Rc<Closure>), // must use Rc<Closure> here in order to get proper pointer equality + + #[serde(skip)] + Builtin(Builtin), + + // Internal values that, while they technically exist at runtime, + // are never returned to or created directly by users. + #[serde(skip_deserializing)] + Thunk(Thunk), + + // See [`compiler::compile_select_or()`] for explanation + #[serde(skip)] + AttrNotFound, + + // this can only occur in Chunk::Constants and nowhere else + #[serde(skip)] + Blueprint(Rc<Lambda>), + + #[serde(skip)] + DeferredUpvalue(StackIdx), + #[serde(skip)] + UnresolvedPath(Box<PathBuf>), + #[serde(skip)] + Json(serde_json::Value), +} + +lazy_static! 
{ + static ref WRITE_FLOAT_OPTIONS: lexical_core::WriteFloatOptions = + lexical_core::WriteFloatOptionsBuilder::new() + .trim_floats(true) + .round_mode(lexical_core::write_float_options::RoundMode::Round) + .positive_exponent_break(Some(NonZeroI32::new(5).unwrap())) + .max_significant_digits(Some(NonZeroUsize::new(6).unwrap())) + .build() + .unwrap(); +} + +// Helper macros to generate the to_*/as_* macros while accounting for +// thunks. + +/// Generate an `as_*` method returning a reference to the expected +/// type, or a type error. This only works for types that implement +/// `Copy`, as returning a reference to an inner thunk value is not +/// possible. + +/// Generate an `as_*/to_*` accessor method that returns either the +/// expected type, or a type error. +macro_rules! gen_cast { + ( $name:ident, $type:ty, $expected:expr, $variant:pat, $result:expr ) => { + pub fn $name(&self) -> Result<$type, ErrorKind> { + match self { + $variant => Ok($result), + Value::Thunk(thunk) => Self::$name(&thunk.value()), + other => Err(type_error($expected, &other)), + } + } + }; +} + +/// Generate an `as_*_mut/to_*_mut` accessor method that returns either the +/// expected type, or a type error. +macro_rules! gen_cast_mut { + ( $name:ident, $type:ty, $expected:expr, $variant:ident) => { + pub fn $name(&mut self) -> Result<&mut $type, ErrorKind> { + match self { + Value::$variant(x) => Ok(x), + other => Err(type_error($expected, &other)), + } + } + }; +} + +/// Generate an `is_*` type-checking method. +macro_rules! gen_is { + ( $name:ident, $variant:pat ) => { + pub fn $name(&self) -> bool { + match self { + $variant => true, + Value::Thunk(thunk) => Self::$name(&thunk.value()), + _ => false, + } + } + }; +} + +/// Describes what input types are allowed when coercing a `Value` to a string +#[derive(Clone, Copy, PartialEq, Debug)] +pub enum CoercionKind { + /// Only coerce already "stringly" types like strings and paths, but also + /// coerce sets that have a `__toString` attribute. Equivalent to + /// `!coerceMore` in C++ Nix. + Weak, + /// Coerce all value types included by `Weak`, but also coerce `null`, + /// booleans, integers, floats and lists of coercible types. Equivalent to + /// `coerceMore` in C++ Nix. + Strong, +} + +impl<T> From<T> for Value +where + T: Into<NixString>, +{ + fn from(t: T) -> Self { + Self::String(t.into()) + } +} + +/// Constructors +impl Value { + /// Construct a [`Value::Attrs`] from a [`NixAttrs`]. + pub fn attrs(attrs: NixAttrs) -> Self { + Self::Attrs(Box::new(attrs)) + } +} + +/// Controls what kind of by-pointer equality comparison is allowed. +/// +/// See `//tvix/docs/value-pointer-equality.md` for details. +#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)] +pub enum PointerEquality { + /// Pointer equality not allowed at all. + ForbidAll, + + /// Pointer equality comparisons only allowed for nested values. + AllowNested, + + /// Pointer equality comparisons are allowed in all contexts. + AllowAll, +} + +impl Value { + // TODO(amjoseph): de-asyncify this (when called directly by the VM) + /// Deeply forces a value, traversing e.g. lists and attribute sets and forcing + /// their contents, too. + /// + /// This is a generator function. + pub(super) async fn deep_force( + self, + co: GenCo, + thunk_set: SharedThunkSet, + ) -> Result<Value, ErrorKind> { + // Get rid of any top-level thunks, and bail out of self-recursive + // thunks. 
+ let value = if let Value::Thunk(ref t) = &self { + if !thunk_set.insert(t) { + return Ok(self); + } + generators::request_force(&co, self).await + } else { + self + }; + + match &value { + // Short-circuit on already evaluated values, or fail on internal values. + Value::Null + | Value::Bool(_) + | Value::Integer(_) + | Value::Float(_) + | Value::String(_) + | Value::Path(_) + | Value::Closure(_) + | Value::Builtin(_) => return Ok(value), + + Value::List(list) => { + for val in list { + generators::request_deep_force(&co, val.clone(), thunk_set.clone()).await; + } + } + + Value::Attrs(attrs) => { + for (_, val) in attrs.iter() { + generators::request_deep_force(&co, val.clone(), thunk_set.clone()).await; + } + } + + Value::Thunk(_) => panic!("Tvix bug: force_value() returned a thunk"), + + Value::AttrNotFound + | Value::Blueprint(_) + | Value::DeferredUpvalue(_) + | Value::UnresolvedPath(_) + | Value::Json(_) => panic!( + "Tvix bug: internal value left on stack: {}", + value.type_of() + ), + }; + + Ok(value) + } + + // TODO(amjoseph): de-asyncify this (when called directly by the VM) + /// Coerce a `Value` to a string. See `CoercionKind` for a rundown of what + /// input types are accepted under what circumstances. + pub async fn coerce_to_string(self, co: GenCo, kind: CoercionKind) -> Result<Value, ErrorKind> { + let value = generators::request_force(&co, self).await; + + match (value, kind) { + // coercions that are always done + tuple @ (Value::String(_), _) => Ok(tuple.0), + + // TODO(sterni): Think about proper encoding handling here. This needs + // general consideration anyways, since one current discrepancy between + // C++ Nix and Tvix is that the former's strings are arbitrary byte + // sequences without NUL bytes, whereas Tvix only allows valid + // Unicode. See also b/189. + (Value::Path(p), _) => { + // TODO(tazjin): there are cases where coerce_to_string does not import + let imported = generators::request_path_import(&co, *p).await; + Ok(imported.to_string_lossy().into_owned().into()) + } + + // Attribute sets can be converted to strings if they either have an + // `__toString` attribute which holds a function that receives the + // set itself or an `outPath` attribute which should be a string. + // `__toString` is preferred. 
+ (Value::Attrs(attrs), kind) => { + if let Some(s) = attrs.try_to_string(&co, kind).await { + return Ok(Value::String(s)); + } + + if let Some(out_path) = attrs.select("outPath") { + let s = generators::request_string_coerce(&co, out_path.clone(), kind).await; + return Ok(Value::String(s)); + } + + Err(ErrorKind::NotCoercibleToString { from: "set", kind }) + } + + // strong coercions + (Value::Null, CoercionKind::Strong) | (Value::Bool(false), CoercionKind::Strong) => { + Ok("".into()) + } + (Value::Bool(true), CoercionKind::Strong) => Ok("1".into()), + + (Value::Integer(i), CoercionKind::Strong) => Ok(format!("{i}").into()), + (Value::Float(f), CoercionKind::Strong) => { + // contrary to normal Display, coercing a float to a string will + // result in unconditional 6 decimal places + Ok(format!("{:.6}", f).into()) + } + + // Lists are coerced by coercing their elements and interspersing spaces + (Value::List(list), CoercionKind::Strong) => { + let mut out = String::new(); + + for (idx, elem) in list.into_iter().enumerate() { + if idx > 0 { + out.push(' '); + } + + let s = generators::request_string_coerce(&co, elem, kind).await; + out.push_str(s.as_str()); + } + + Ok(Value::String(out.into())) + } + + (Value::Thunk(_), _) => panic!("Tvix bug: force returned unforced thunk"), + + val @ (Value::Closure(_), _) + | val @ (Value::Builtin(_), _) + | val @ (Value::Null, _) + | val @ (Value::Bool(_), _) + | val @ (Value::Integer(_), _) + | val @ (Value::Float(_), _) + | val @ (Value::List(_), _) => Err(ErrorKind::NotCoercibleToString { + from: val.0.type_of(), + kind, + }), + + (Value::AttrNotFound, _) + | (Value::Blueprint(_), _) + | (Value::DeferredUpvalue(_), _) + | (Value::UnresolvedPath(_), _) + | (Value::Json(_), _) => { + panic!("tvix bug: .coerce_to_string() called on internal value") + } + } + } + + // TODO(amjoseph): de-asyncify this (when called directly by the VM) + /// Compare two Nix values for equality, forcing nested parts of the structure + /// as needed. + /// + /// This comparison needs to be invoked for nested values (e.g. in lists and + /// attribute sets) as well, which is done by suspending and asking the VM to + /// perform the nested comparison. + /// + /// The `top_level` parameter controls whether this invocation is the top-level + /// comparison, or a nested value comparison. See + /// `//tvix/docs/value-pointer-equality.md` + pub(crate) async fn nix_eq( + self, + other: Value, + co: GenCo, + ptr_eq: PointerEquality, + ) -> Result<Value, ErrorKind> { + let a = match self { + Value::Thunk(ref thunk) => { + // If both values are thunks, and thunk comparisons are allowed by + // pointer, do that and move on. 
+ if ptr_eq == PointerEquality::AllowAll { + if let Value::Thunk(t1) = &other { + if t1.ptr_eq(thunk) { + return Ok(Value::Bool(true)); + } + } + }; + + generators::request_force(&co, self).await + } + + _ => self, + }; + + let b = match other { + Value::Thunk(_) => generators::request_force(&co, other).await, + _ => other, + }; + + debug_assert!(!matches!(a, Value::Thunk(_))); + debug_assert!(!matches!(b, Value::Thunk(_))); + + let result = match (a, b) { + // Trivial comparisons + (Value::Null, Value::Null) => true, + (Value::Bool(b1), Value::Bool(b2)) => b1 == b2, + (Value::String(s1), Value::String(s2)) => s1 == s2, + (Value::Path(p1), Value::Path(p2)) => p1 == p2, + + // Numerical comparisons (they work between float & int) + (Value::Integer(i1), Value::Integer(i2)) => i1 == i2, + (Value::Integer(i), Value::Float(f)) => i as f64 == f, + (Value::Float(f1), Value::Float(f2)) => f1 == f2, + (Value::Float(f), Value::Integer(i)) => i as f64 == f, + + // List comparisons + (Value::List(l1), Value::List(l2)) => { + if ptr_eq >= PointerEquality::AllowNested && l1.ptr_eq(&l2) { + return Ok(Value::Bool(true)); + } + + if l1.len() != l2.len() { + return Ok(Value::Bool(false)); + } + + for (vi1, vi2) in l1.into_iter().zip(l2.into_iter()) { + if !generators::check_equality( + &co, + vi1, + vi2, + std::cmp::max(ptr_eq, PointerEquality::AllowNested), + ) + .await? + { + return Ok(Value::Bool(false)); + } + } + + true + } + + (_, Value::List(_)) | (Value::List(_), _) => false, + + // Attribute set comparisons + (Value::Attrs(a1), Value::Attrs(a2)) => { + if ptr_eq >= PointerEquality::AllowNested && a1.ptr_eq(&a2) { + return Ok(Value::Bool(true)); + } + + // Special-case for derivation comparisons: If both attribute sets + // have `type = derivation`, compare them by `outPath`. + match (a1.select("type"), a2.select("type")) { + (Some(v1), Some(v2)) => { + let s1 = generators::request_force(&co, v1.clone()).await.to_str(); + let s2 = generators::request_force(&co, v2.clone()).await.to_str(); + + if let (Ok(s1), Ok(s2)) = (s1, s2) { + if s1.as_str() == "derivation" && s2.as_str() == "derivation" { + // TODO(tazjin): are the outPaths really required, + // or should it fall through? + let out1 = a1 + .select_required("outPath") + .context("comparing derivations")? + .clone(); + + let out2 = a2 + .select_required("outPath") + .context("comparing derivations")? + .clone(); + + let result = generators::request_force(&co, out1.clone()) + .await + .to_str()? + == generators::request_force(&co, out2.clone()) + .await + .to_str()?; + return Ok(Value::Bool(result)); + } + } + } + _ => {} + }; + + if a1.len() != a2.len() { + return Ok(Value::Bool(false)); + } + + let iter1 = a1.into_iter_sorted(); + let iter2 = a2.into_iter_sorted(); + + for ((k1, v1), (k2, v2)) in iter1.zip(iter2) { + if k1 != k2 { + return Ok(Value::Bool(false)); + } + + if !generators::check_equality( + &co, + v1, + v2, + std::cmp::max(ptr_eq, PointerEquality::AllowNested), + ) + .await? + { + return Ok(Value::Bool(false)); + } + } + + true + } + + (Value::Attrs(_), _) | (_, Value::Attrs(_)) => false, + + (Value::Closure(c1), Value::Closure(c2)) if ptr_eq >= PointerEquality::AllowNested => { + Rc::ptr_eq(&c1, &c2) + } + + // Everything else is either incomparable (e.g. internal types) or + // false. 
+ _ => false, + }; + + Ok(Value::Bool(result)) + } + + pub fn type_of(&self) -> &'static str { + match self { + Value::Null => "null", + Value::Bool(_) => "bool", + Value::Integer(_) => "int", + Value::Float(_) => "float", + Value::String(_) => "string", + Value::Path(_) => "path", + Value::Attrs(_) => "set", + Value::List(_) => "list", + Value::Closure(_) | Value::Builtin(_) => "lambda", + + // Internal types. Note: These are only elaborated here + // because it makes debugging easier. If a user ever sees + // any of these strings, it's a bug. + Value::Thunk(_) => "internal[thunk]", + Value::AttrNotFound => "internal[attr_not_found]", + Value::Blueprint(_) => "internal[blueprint]", + Value::DeferredUpvalue(_) => "internal[deferred_upvalue]", + Value::UnresolvedPath(_) => "internal[unresolved_path]", + Value::Json(_) => "internal[json]", + } + } + + gen_cast!(as_bool, bool, "bool", Value::Bool(b), *b); + gen_cast!(as_int, i64, "int", Value::Integer(x), *x); + gen_cast!(as_float, f64, "float", Value::Float(x), *x); + gen_cast!(to_str, NixString, "string", Value::String(s), s.clone()); + gen_cast!(to_attrs, Box<NixAttrs>, "set", Value::Attrs(a), a.clone()); + gen_cast!(to_list, NixList, "list", Value::List(l), l.clone()); + gen_cast!( + as_closure, + Rc<Closure>, + "lambda", + Value::Closure(c), + c.clone() + ); + + gen_cast_mut!(as_list_mut, NixList, "list", List); + + gen_is!(is_path, Value::Path(_)); + gen_is!(is_number, Value::Integer(_) | Value::Float(_)); + gen_is!(is_bool, Value::Bool(_)); + + // TODO(amjoseph): de-asyncify this (when called directly by the VM) + /// Compare `self` against other using (fallible) Nix ordering semantics. + /// + /// Note that as this returns an `Option<Ordering>` it can not directly be + /// used as a generator function in the VM. The exact use depends on the + /// callsite, as the meaning is interpreted in different ways e.g. based on + /// the comparison operator used. + /// + /// The function is intended to be used from within other generator + /// functions or `gen!` blocks. + pub async fn nix_cmp_ordering( + self, + other: Self, + co: GenCo, + ) -> Result<Option<Ordering>, ErrorKind> { + Self::nix_cmp_ordering_(self, other, co).await + } + + async fn nix_cmp_ordering_( + mut myself: Self, + mut other: Self, + co: GenCo, + ) -> Result<Option<Ordering>, ErrorKind> { + 'outer: loop { + match (myself, other) { + // same types + (Value::Integer(i1), Value::Integer(i2)) => return Ok(i1.partial_cmp(&i2)), + (Value::Float(f1), Value::Float(f2)) => return Ok(f1.partial_cmp(&f2)), + (Value::String(s1), Value::String(s2)) => return Ok(s1.partial_cmp(&s2)), + (Value::List(l1), Value::List(l2)) => { + for i in 0.. { + if i == l2.len() { + return Ok(Some(Ordering::Greater)); + } else if i == l1.len() { + return Ok(Some(Ordering::Less)); + } else if !generators::check_equality( + &co, + l1[i].clone(), + l2[i].clone(), + PointerEquality::AllowAll, + ) + .await? + { + // TODO: do we need to control `top_level` here? 
+ myself = generators::request_force(&co, l1[i].clone()).await; + other = generators::request_force(&co, l2[i].clone()).await; + continue 'outer; + } + } + + unreachable!() + } + + // different types + (Value::Integer(i1), Value::Float(f2)) => return Ok((i1 as f64).partial_cmp(&f2)), + (Value::Float(f1), Value::Integer(i2)) => return Ok(f1.partial_cmp(&(i2 as f64))), + + // unsupported types + (lhs, rhs) => { + return Err(ErrorKind::Incomparable { + lhs: lhs.type_of(), + rhs: rhs.type_of(), + }) + } + } + } + } + + // TODO(amjoseph): de-asyncify this (when called directly by the VM) + pub async fn force(self, co: GenCo, span: LightSpan) -> Result<Value, ErrorKind> { + if let Value::Thunk(thunk) = self { + return thunk.force(co, span).await; + } + + Ok(self) + } + + /// Explain a value in a human-readable way, e.g. by presenting + /// the docstrings of functions if present. + pub fn explain(&self) -> String { + match self { + Value::Null => "the 'null' value".into(), + Value::Bool(b) => format!("the boolean value '{}'", b), + Value::Integer(i) => format!("the integer '{}'", i), + Value::Float(f) => format!("the float '{}'", f), + Value::String(s) => format!("the string '{}'", s), + Value::Path(p) => format!("the path '{}'", p.to_string_lossy()), + Value::Attrs(attrs) => format!("a {}-item attribute set", attrs.len()), + Value::List(list) => format!("a {}-item list", list.len()), + + Value::Closure(f) => { + if let Some(name) = &f.lambda.name { + format!("the user-defined Nix function '{}'", name) + } else { + "a user-defined Nix function".to_string() + } + } + + Value::Builtin(b) => { + let mut out = format!("the builtin function '{}'", b.name()); + if let Some(docs) = b.documentation() { + out.push_str("\n\n"); + out.push_str(docs); + } + out + } + + // TODO: handle suspended thunks with a different explanation instead of panicking + Value::Thunk(t) => t.value().explain(), + + Value::AttrNotFound + | Value::Blueprint(_) + | Value::DeferredUpvalue(_) + | Value::UnresolvedPath(_) + | Value::Json(_) => "an internal Tvix evaluator value".into(), + } + } +} + +trait TotalDisplay { + fn total_fmt(&self, f: &mut std::fmt::Formatter<'_>, set: &mut ThunkSet) -> std::fmt::Result; +} + +impl Display for Value { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + self.total_fmt(f, &mut Default::default()) + } +} + +/// Emulates the C++-Nix style formatting of floats, which diverges +/// significantly from Rust's native float formatting. +fn total_fmt_float<F: std::fmt::Write>(num: f64, mut f: F) -> std::fmt::Result { + let mut buf = [b'0'; lexical_core::BUFFER_SIZE]; + let mut s = lexical_core::write_with_options::<f64, { CXX_LITERAL }>( + num, + &mut buf, + &WRITE_FLOAT_OPTIONS, + ); + + // apply some postprocessing on the buffer. If scientific + // notation is used (we see an `e`), and the next character is + // a digit, add the missing `+` sign.) + let mut new_s = Vec::with_capacity(s.len()); + + if s.contains(&b'e') { + for (i, c) in s.iter().enumerate() { + // encountered `e` + if c == &b'e' { + // next character is a digit (so no negative exponent) + if s.len() > i && s[i + 1].is_ascii_digit() { + // copy everything from the start up to (including) the e + new_s.extend_from_slice(&s[0..=i]); + // add the missing '+' + new_s.push(b'+'); + // check for the remaining characters. 
+ // If it's only one, we need to prepend a trailing zero + if s.len() == i + 2 { + new_s.push(b'0'); + } + new_s.extend_from_slice(&s[i + 1..]); + break; + } + } + } + + // if we modified the scientific notation, flip the reference + if !new_s.is_empty() { + s = &mut new_s + } + } + // else, if this is not scientific notation, and there's a + // decimal point, make sure we really drop trailing zeroes. + // In some cases, lexical_core doesn't. + else if s.contains(&b'.') { + for (i, c) in s.iter().enumerate() { + // at `.`` + if c == &b'.' { + // trim zeroes from the right side. + let frac = String::from_utf8_lossy(&s[i + 1..]); + let frac_no_trailing_zeroes = frac.trim_end_matches('0'); + + if frac.len() != frac_no_trailing_zeroes.len() { + // we managed to strip something, construct new_s + if frac_no_trailing_zeroes.is_empty() { + // if frac_no_trailing_zeroes is empty, the fractional part was all zeroes, so we can drop the decimal point as well + new_s.extend_from_slice(&s[0..=i - 1]); + } else { + // else, assemble the rest of the string + new_s.extend_from_slice(&s[0..=i]); + new_s.extend_from_slice(frac_no_trailing_zeroes.as_bytes()); + } + + // flip the reference + s = &mut new_s; + break; + } + } + } + } + + write!(f, "{}", format!("{}", String::from_utf8_lossy(s))) +} + +impl TotalDisplay for Value { + fn total_fmt(&self, f: &mut std::fmt::Formatter<'_>, set: &mut ThunkSet) -> std::fmt::Result { + match self { + Value::Null => f.write_str("null"), + Value::Bool(true) => f.write_str("true"), + Value::Bool(false) => f.write_str("false"), + Value::Integer(num) => write!(f, "{}", num), + Value::String(s) => s.fmt(f), + Value::Path(p) => p.display().fmt(f), + Value::Attrs(attrs) => attrs.total_fmt(f, set), + Value::List(list) => list.total_fmt(f, set), + Value::Closure(_) => f.write_str("lambda"), // TODO: print position + Value::Builtin(builtin) => builtin.fmt(f), + + // Nix prints floats with a maximum precision of 5 digits + // only. Except when it decides to use scientific notation + // (with a + after the `e`, and zero-padded to 0 digits) + Value::Float(num) => total_fmt_float(*num, f), + + // internal types + Value::AttrNotFound => f.write_str("internal[not found]"), + Value::Blueprint(_) => f.write_str("internal[blueprint]"), + Value::DeferredUpvalue(_) => f.write_str("internal[deferred_upvalue]"), + Value::UnresolvedPath(_) => f.write_str("internal[unresolved_path]"), + Value::Json(_) => f.write_str("internal[json]"), + + // Delegate thunk display to the type, as it must handle + // the case of already evaluated or cyclic thunks. 
+ Value::Thunk(t) => t.total_fmt(f, set), + } + } +} + +impl From<bool> for Value { + fn from(b: bool) -> Self { + Value::Bool(b) + } +} + +impl From<i64> for Value { + fn from(i: i64) -> Self { + Self::Integer(i) + } +} + +impl From<f64> for Value { + fn from(i: f64) -> Self { + Self::Float(i) + } +} + +impl From<PathBuf> for Value { + fn from(path: PathBuf) -> Self { + Self::Path(Box::new(path)) + } +} + +fn type_error(expected: &'static str, actual: &Value) -> ErrorKind { + ErrorKind::TypeError { + expected, + actual: actual.type_of(), + } +} + +#[cfg(test)] +mod tests { + mod floats { + use crate::value::total_fmt_float; + + #[test] + fn format_float() { + let ff = vec![ + (0f64, "0"), + (1.0f64, "1"), + (-0.01, "-0.01"), + (5e+22, "5e+22"), + (1e6, "1e+06"), + (-2E-2, "-0.02"), + (6.626e-34, "6.626e-34"), + (9_224_617.445_991_227, "9.22462e+06"), + ]; + for (n, expected) in ff.iter() { + let mut buf = String::new(); + let res = total_fmt_float(*n, &mut buf); + assert!(res.is_ok()); + assert_eq!( + expected, &buf, + "{} should be formatted as {}, but got {}", + n, expected, &buf + ); + } + } + } +} diff --git a/tvix/eval/src/value/path.rs b/tvix/eval/src/value/path.rs new file mode 100644 index 000000000000..ad526a8746f8 --- /dev/null +++ b/tvix/eval/src/value/path.rs @@ -0,0 +1,14 @@ +use path_clean::PathClean; +use std::path::PathBuf; + +/// This function should match the behavior of canonPath() in +/// src/libutil/util.cc of cppnix. Currently it does not match that +/// behavior; it uses the `path_clean` library which is based on the +/// Go standard library +/// +/// TODO: make this match the behavior of cppnix +/// TODO: write tests for this + +pub fn canon_path(path: PathBuf) -> PathBuf { + path.clean() +} diff --git a/tvix/eval/src/value/string.rs b/tvix/eval/src/value/string.rs new file mode 100644 index 000000000000..7144ca360d15 --- /dev/null +++ b/tvix/eval/src/value/string.rs @@ -0,0 +1,262 @@ +//! This module implements Nix language strings. +//! +//! Nix language strings never need to be modified on the language +//! level, allowing us to shave off some memory overhead and only +//! paying the cost when creating new strings. 
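+
+// A rough, illustrative sketch of the behaviour implemented further down in
+// this file (see the `Display` impl and `ident_str`; hypothetical REPL-style
+// output, assuming the calls are made from within this crate):
+//
+//   NixString::from("hello").to_string()    // renders as  "hello"    (always quoted)
+//   NixString::from("a\nb").to_string()     // renders as  "a\nb"     (newline escaped)
+//   NixString::from("foo").ident_str()      // renders as  foo        (valid identifier, unquoted)
+//   NixString::from("foo bar").ident_str()  // renders as  "foo bar"  (quoted, not an identifier)
+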
+use rnix::ast; +use std::ffi::OsStr; +use std::hash::Hash; +use std::ops::Deref; +use std::path::Path; +use std::{borrow::Cow, fmt::Display, str::Chars}; + +use serde::de::{Deserializer, Visitor}; +use serde::{Deserialize, Serialize}; + +#[repr(transparent)] +#[derive(Clone, Debug, Serialize)] +pub struct NixString(Box<str>); + +impl PartialEq for NixString { + fn eq(&self, other: &Self) -> bool { + self.as_str() == other.as_str() + } +} + +impl Eq for NixString {} + +impl PartialOrd for NixString { + fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> { + self.as_str().partial_cmp(other.as_str()) + } +} + +impl Ord for NixString { + fn cmp(&self, other: &Self) -> std::cmp::Ordering { + self.as_str().cmp(other.as_str()) + } +} + +impl From<&str> for NixString { + fn from(s: &str) -> Self { + NixString(Box::from(s)) + } +} + +impl From<String> for NixString { + fn from(s: String) -> Self { + NixString(s.into_boxed_str()) + } +} + +impl From<Box<str>> for NixString { + fn from(s: Box<str>) -> Self { + Self(s) + } +} + +impl From<ast::Ident> for NixString { + fn from(ident: ast::Ident) -> Self { + ident.ident_token().unwrap().text().into() + } +} + +impl Hash for NixString { + fn hash<H: std::hash::Hasher>(&self, state: &mut H) { + self.as_str().hash(state) + } +} + +impl<'de> Deserialize<'de> for NixString { + fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> + where + D: Deserializer<'de>, + { + struct StringVisitor; + + impl<'de> Visitor<'de> for StringVisitor { + type Value = NixString; + + fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result { + formatter.write_str("a valid Nix string") + } + + fn visit_string<E>(self, v: String) -> Result<Self::Value, E> + where + E: serde::de::Error, + { + Ok(v.into()) + } + + fn visit_str<E>(self, v: &str) -> Result<Self::Value, E> + where + E: serde::de::Error, + { + Ok(v.into()) + } + } + + deserializer.deserialize_string(StringVisitor) + } +} + +#[cfg(feature = "arbitrary")] +mod arbitrary { + use super::*; + use proptest::prelude::{any_with, Arbitrary}; + use proptest::strategy::{BoxedStrategy, Strategy}; + + impl Arbitrary for NixString { + type Parameters = <String as Arbitrary>::Parameters; + + type Strategy = BoxedStrategy<Self>; + + fn arbitrary_with(args: Self::Parameters) -> Self::Strategy { + any_with::<String>(args).prop_map(Self::from).boxed() + } + } +} + +impl NixString { + pub fn as_str(&self) -> &str { + &self.0 + } + + /// Return a displayable representation of the string as an + /// identifier. + /// + /// This is used when printing out strings used as e.g. attribute + /// set keys, as those are only escaped in the presence of special + /// characters. + pub fn ident_str(&self) -> Cow<str> { + let escaped = nix_escape_string(self.as_str()); + + match escaped { + // A borrowed string is unchanged and can be returned as + // is. + Cow::Borrowed(_) => { + if is_valid_nix_identifier(&escaped) { + escaped + } else { + Cow::Owned(format!("\"{}\"", escaped)) + } + } + + // An owned string has escapes, and needs the outer quotes + // for display. 
+ Cow::Owned(s) => Cow::Owned(format!("\"{}\"", s)), + } + } + + pub fn concat(&self, other: &Self) -> Self { + let mut s = self.as_str().to_owned(); + s.push_str(other.as_str()); + NixString(s.into_boxed_str()) + } + + pub fn chars(&self) -> Chars<'_> { + self.0.chars() + } +} + +fn nix_escape_char(ch: char, next: Option<&char>) -> Option<&'static str> { + match (ch, next) { + ('\\', _) => Some("\\\\"), + ('"', _) => Some("\\\""), + ('\n', _) => Some("\\n"), + ('\t', _) => Some("\\t"), + ('\r', _) => Some("\\r"), + ('$', Some('{')) => Some("\\$"), + _ => None, + } +} + +/// Return true if this string can be used as an identifier in Nix. +fn is_valid_nix_identifier(s: &str) -> bool { + // adapted from rnix-parser's tokenizer.rs + let mut chars = s.chars(); + match chars.next() { + Some('a'..='z' | 'A'..='Z' | '_') => (), + _ => return false, + } + for c in chars { + match c { + 'a'..='z' | 'A'..='Z' | '0'..='9' | '_' | '-' | '\'' => (), + _ => return false, + } + } + true +} + +/// Escape a Nix string for display, as most user-visible representation +/// are escaped strings. +/// +/// Note that this does not add the outer pair of surrounding quotes. +fn nix_escape_string(input: &str) -> Cow<str> { + let mut iter = input.chars().enumerate().peekable(); + + while let Some((i, c)) = iter.next() { + if let Some(esc) = nix_escape_char(c, iter.peek().map(|(_, c)| c)) { + let mut escaped = String::with_capacity(input.len()); + escaped.push_str(&input[..i]); + escaped.push_str(esc); + + let mut inner_iter = input[i + 1..].chars().peekable(); + while let Some(c) = inner_iter.next() { + match nix_escape_char(c, inner_iter.peek()) { + Some(esc) => escaped.push_str(esc), + None => escaped.push(c), + } + } + + return Cow::Owned(escaped); + } + } + + Cow::Borrowed(input) +} + +impl Display for NixString { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.write_str("\"")?; + f.write_str(&nix_escape_string(self.as_str()))?; + f.write_str("\"") + } +} + +impl AsRef<str> for NixString { + fn as_ref(&self) -> &str { + self.as_str() + } +} + +impl AsRef<OsStr> for NixString { + fn as_ref(&self) -> &OsStr { + self.as_str().as_ref() + } +} + +impl AsRef<Path> for NixString { + fn as_ref(&self) -> &Path { + self.as_str().as_ref() + } +} + +impl Deref for NixString { + type Target = str; + + fn deref(&self) -> &Self::Target { + self.as_str() + } +} + +#[cfg(test)] +mod tests { + use super::*; + + use crate::properties::{eq_laws, hash_laws, ord_laws}; + + eq_laws!(NixString); + hash_laws!(NixString); + ord_laws!(NixString); +} diff --git a/tvix/eval/src/value/thunk.rs b/tvix/eval/src/value/thunk.rs new file mode 100644 index 000000000000..9c048d40b499 --- /dev/null +++ b/tvix/eval/src/value/thunk.rs @@ -0,0 +1,365 @@ +//! This module implements the runtime representation of Thunks. +//! +//! Thunks are a special kind of Nix value, similar to a 0-argument +//! closure that yields some value. Thunks are used to implement the +//! lazy evaluation behaviour of Nix: +//! +//! Whenever the compiler determines that an expression should be +//! evaluated lazily, it creates a thunk instead of compiling the +//! expression value directly. At any point in the runtime where the +//! actual value of a thunk is required, it is "forced", meaning that +//! the encompassing computation takes place and the thunk takes on +//! its new value. +//! +//! Thunks have interior mutability to be able to memoise their +//! computation. Once a thunk is evaluated, its internal +//! 
representation becomes the result of the expression. It is legal +//! for the runtime to replace a thunk object directly with its value +//! object, but when forcing a thunk, the runtime *must* mutate the +//! memoisable slot. + +use std::{ + cell::{Ref, RefCell, RefMut}, + collections::HashSet, + fmt::Debug, + rc::Rc, +}; + +use crate::{ + errors::ErrorKind, + opcode::OpCode, + spans::LightSpan, + upvalues::Upvalues, + value::Closure, + vm::generators::{self, GenCo}, + Value, +}; + +use super::{Lambda, TotalDisplay}; +use codemap::Span; + +/// Internal representation of a suspended native thunk. +struct SuspendedNative(Box<dyn Fn() -> Result<Value, ErrorKind>>); + +impl Debug for SuspendedNative { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "SuspendedNative({:p})", self.0) + } +} + +/// Internal representation of the different states of a thunk. +/// +/// Upvalues must be finalised before leaving the initial state +/// (Suspended or RecursiveClosure). The [`value()`] function may +/// not be called until the thunk is in the final state (Evaluated). +#[derive(Debug)] +enum ThunkRepr { + /// Thunk is closed over some values, suspended and awaiting + /// execution. + Suspended { + lambda: Rc<Lambda>, + upvalues: Rc<Upvalues>, + light_span: LightSpan, + }, + + /// Thunk is a suspended native computation. + Native(SuspendedNative), + + /// Thunk currently under-evaluation; encountering a blackhole + /// value means that infinite recursion has occured. + Blackhole { + /// Span at which the thunk was first forced. + forced_at: LightSpan, + + /// Span at which the thunk was originally suspended. + suspended_at: Option<LightSpan>, + + /// Span of the first instruction of the actual code inside + /// the thunk. + content_span: Option<Span>, + }, + + /// Fully evaluated thunk. + Evaluated(Value), +} + +impl ThunkRepr { + fn debug_repr(&self) -> String { + match self { + ThunkRepr::Evaluated(v) => format!("thunk(val|{})", v), + ThunkRepr::Blackhole { .. } => "thunk(blackhole)".to_string(), + ThunkRepr::Native(_) => "thunk(native)".to_string(), + ThunkRepr::Suspended { lambda, .. } => format!("thunk({:p})", *lambda), + } + } +} + +/// A thunk is created for any value which requires non-strict +/// evaluation due to self-reference or lazy semantics (or both). +/// Every reference cycle involving `Value`s will contain at least +/// one `Thunk`. +#[derive(Clone, Debug)] +pub struct Thunk(Rc<RefCell<ThunkRepr>>); + +impl Thunk { + pub fn new_closure(lambda: Rc<Lambda>) -> Self { + Thunk(Rc::new(RefCell::new(ThunkRepr::Evaluated(Value::Closure( + Rc::new(Closure { + upvalues: Rc::new(Upvalues::with_capacity(lambda.upvalue_count)), + lambda: lambda.clone(), + }), + ))))) + } + + pub fn new_suspended(lambda: Rc<Lambda>, light_span: LightSpan) -> Self { + Thunk(Rc::new(RefCell::new(ThunkRepr::Suspended { + upvalues: Rc::new(Upvalues::with_capacity(lambda.upvalue_count)), + lambda: lambda.clone(), + light_span, + }))) + } + + pub fn new_suspended_native(native: Box<dyn Fn() -> Result<Value, ErrorKind>>) -> Self { + Thunk(Rc::new(RefCell::new(ThunkRepr::Native(SuspendedNative( + native, + ))))) + } + + /// Helper function to create a [`Thunk`] that calls a function given as the + /// [`Value`] `callee` with the argument `arg` when it is forced. This is + /// particularly useful in builtin implementations if the result of calling + /// a function does not need to be forced immediately, because e.g. it is + /// stored in an attribute set. 
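+    ///
+    /// A rough usage sketch (hypothetical caller, for illustration only):
+    ///
+    /// ```ignore
+    /// // Construct `f x` lazily; nothing is evaluated until the thunk is forced.
+    /// let lazy = Value::Thunk(Thunk::new_suspended_call(f, x, light_span));
+    /// ```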
+ pub fn new_suspended_call(callee: Value, arg: Value, light_span: LightSpan) -> Self { + let mut lambda = Lambda::default(); + let span = light_span.span(); + + let arg_idx = lambda.chunk().push_constant(arg); + let f_idx = lambda.chunk().push_constant(callee); + + // This is basically a recreation of compile_apply(): + // We need to push the argument onto the stack and then the function. + // The function (not the argument) needs to be forced before calling. + lambda.chunk.push_op(OpCode::OpConstant(arg_idx), span); + lambda.chunk().push_op(OpCode::OpConstant(f_idx), span); + lambda.chunk.push_op(OpCode::OpForce, span); + lambda.chunk.push_op(OpCode::OpCall, span); + + // Inform the VM that the chunk has ended + lambda.chunk.push_op(OpCode::OpReturn, span); + + Thunk(Rc::new(RefCell::new(ThunkRepr::Suspended { + upvalues: Rc::new(Upvalues::with_capacity(0)), + lambda: Rc::new(lambda), + light_span, + }))) + } + + fn prepare_blackhole(&self, forced_at: LightSpan) -> ThunkRepr { + match &*self.0.borrow() { + ThunkRepr::Suspended { + light_span, lambda, .. + } => ThunkRepr::Blackhole { + forced_at, + suspended_at: Some(light_span.clone()), + content_span: Some(lambda.chunk.first_span()), + }, + + _ => ThunkRepr::Blackhole { + forced_at, + suspended_at: None, + content_span: None, + }, + } + } + + // TODO(amjoseph): de-asyncify this + pub async fn force(self, co: GenCo, span: LightSpan) -> Result<Value, ErrorKind> { + // If the current thunk is already fully evaluated, return its evaluated + // value. The VM will continue running the code that landed us here. + if self.is_forced() { + return Ok(self.value().clone()); + } + + // Begin evaluation of this thunk by marking it as a blackhole, meaning + // that any other forcing frame encountering this thunk before its + // evaluation is completed detected an evaluation cycle. + let inner = self.0.replace(self.prepare_blackhole(span)); + + match inner { + // If there was already a blackhole in the thunk, this is an + // evaluation cycle. + ThunkRepr::Blackhole { + forced_at, + suspended_at, + content_span, + } => Err(ErrorKind::InfiniteRecursion { + first_force: forced_at.span(), + suspended_at: suspended_at.map(|s| s.span()), + content_span, + }), + + // If there is a native function stored in the thunk, evaluate it + // and replace this thunk's representation with the result. + ThunkRepr::Native(native) => { + let value = native.0()?; + + // Force the returned value again, in case the native call + // returned a thunk. + let value = generators::request_force(&co, value).await; + + self.0.replace(ThunkRepr::Evaluated(value.clone())); + Ok(value) + } + + // When encountering a suspended thunk, request that the VM enters + // it and produces the result. + ThunkRepr::Suspended { + lambda, + upvalues, + light_span, + } => { + let value = + generators::request_enter_lambda(&co, lambda, upvalues, light_span).await; + + // This may have returned another thunk, so we need to request + // that the VM forces this value, too. + let value = generators::request_force(&co, value).await; + + self.0.replace(ThunkRepr::Evaluated(value.clone())); + Ok(value) + } + + // If an inner value is found, force it and then update. This is + // most likely an inner thunk, as `Thunk:is_forced` returned false. 
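+            // (i.e. the thunk itself was already evaluated, but its payload is
+            // another thunk which still needs to be forced).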
+ ThunkRepr::Evaluated(val) => { + let value = generators::request_force(&co, val).await; + self.0.replace(ThunkRepr::Evaluated(value.clone())); + Ok(value) + } + } + } + + pub fn finalise(&self, stack: &[Value]) { + self.upvalues_mut().resolve_deferred_upvalues(stack); + } + + pub fn is_evaluated(&self) -> bool { + matches!(*self.0.borrow(), ThunkRepr::Evaluated(_)) + } + + pub fn is_suspended(&self) -> bool { + matches!( + *self.0.borrow(), + ThunkRepr::Suspended { .. } | ThunkRepr::Native(_) + ) + } + + /// Returns true if forcing this thunk will not change it. + pub fn is_forced(&self) -> bool { + match *self.0.borrow() { + ThunkRepr::Evaluated(Value::Thunk(_)) => false, + ThunkRepr::Evaluated(_) => true, + _ => false, + } + } + + /// Returns a reference to the inner evaluated value of a thunk. + /// It is an error to call this on a thunk that has not been + /// forced, or is not otherwise known to be fully evaluated. + // Note: Due to the interior mutability of thunks this is + // difficult to represent in the type system without impacting the + // API too much. + pub fn value(&self) -> Ref<Value> { + Ref::map(self.0.borrow(), |thunk| match thunk { + ThunkRepr::Evaluated(value) => value, + ThunkRepr::Blackhole { .. } => panic!("Thunk::value called on a black-holed thunk"), + ThunkRepr::Suspended { .. } | ThunkRepr::Native(_) => { + panic!("Thunk::value called on a suspended thunk") + } + }) + } + + pub fn upvalues(&self) -> Ref<'_, Upvalues> { + Ref::map(self.0.borrow(), |thunk| match thunk { + ThunkRepr::Suspended { upvalues, .. } => upvalues.as_ref(), + ThunkRepr::Evaluated(Value::Closure(c)) => &c.upvalues, + _ => panic!("upvalues() on non-suspended thunk"), + }) + } + + pub fn upvalues_mut(&self) -> RefMut<'_, Upvalues> { + RefMut::map(self.0.borrow_mut(), |thunk| match thunk { + ThunkRepr::Suspended { upvalues, .. } => Rc::get_mut(upvalues).unwrap(), + ThunkRepr::Evaluated(Value::Closure(c)) => Rc::get_mut( + &mut Rc::get_mut(c).unwrap().upvalues, + ) + .expect( + "upvalues_mut() was called on a thunk which already had multiple references to it", + ), + thunk => panic!("upvalues() on non-suspended thunk: {thunk:?}"), + }) + } + + /// Do not use this without first reading and understanding + /// `tvix/docs/value-pointer-equality.md`. + pub(crate) fn ptr_eq(&self, other: &Self) -> bool { + if Rc::ptr_eq(&self.0, &other.0) { + return true; + } + match &*self.0.borrow() { + ThunkRepr::Evaluated(Value::Closure(c1)) => match &*other.0.borrow() { + ThunkRepr::Evaluated(Value::Closure(c2)) => Rc::ptr_eq(c1, c2), + _ => false, + }, + _ => false, + } + } + + /// Helper function to format thunks in observer output. + pub(crate) fn debug_repr(&self) -> String { + self.0.borrow().debug_repr() + } +} + +impl TotalDisplay for Thunk { + fn total_fmt(&self, f: &mut std::fmt::Formatter<'_>, set: &mut ThunkSet) -> std::fmt::Result { + if !set.insert(self) { + return f.write_str("<CYCLE>"); + } + + match &*self.0.borrow() { + ThunkRepr::Evaluated(v) => v.total_fmt(f, set), + ThunkRepr::Suspended { .. } | ThunkRepr::Native(_) => f.write_str("<CODE>"), + other => write!(f, "internal[{}]", other.debug_repr()), + } + } +} + +/// A wrapper type for tracking which thunks have already been seen in a +/// context. This is necessary for cycle detection. +/// +/// The inner `HashSet` is not available on the outside, as it would be +/// potentially unsafe to interact with the pointers in the set. 
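+///
+/// `TotalDisplay` threads a `ThunkSet` through the printing of nested values,
+/// so that a thunk which (directly or indirectly) refers back to itself is
+/// printed as `<CYCLE>` instead of recursing forever.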
+#[derive(Default)] +pub struct ThunkSet(HashSet<*const ThunkRepr>); + +impl ThunkSet { + /// Check whether the given thunk has already been seen. Will mark the thunk + /// as seen otherwise. + pub fn insert(&mut self, thunk: &Thunk) -> bool { + let ptr: *const ThunkRepr = thunk.0.as_ptr(); + self.0.insert(ptr) + } +} + +#[derive(Default, Clone)] +pub struct SharedThunkSet(Rc<RefCell<ThunkSet>>); + +impl SharedThunkSet { + /// Check whether the given thunk has already been seen. Will mark the thunk + /// as seen otherwise. + pub fn insert(&self, thunk: &Thunk) -> bool { + self.0.borrow_mut().insert(thunk) + } +} diff --git a/tvix/eval/src/vm/generators.rs b/tvix/eval/src/vm/generators.rs new file mode 100644 index 000000000000..f86683dff542 --- /dev/null +++ b/tvix/eval/src/vm/generators.rs @@ -0,0 +1,786 @@ +//! This module implements generator logic for the VM. Generators are functions +//! used during evaluation which can suspend their execution during their +//! control flow, and request that the VM do something. +//! +//! This is used to keep the VM's stack size constant even when evaluating +//! deeply nested recursive data structures. +//! +//! We implement generators using the [`genawaiter`] crate. + +use core::pin::Pin; +use genawaiter::rc::Co; +pub use genawaiter::rc::Gen; +use smol_str::SmolStr; +use std::fmt::Display; +use std::future::Future; + +use crate::value::{PointerEquality, SharedThunkSet}; +use crate::warnings::{EvalWarning, WarningKind}; +use crate::FileType; +use crate::NixString; + +use super::*; + +// -- Implementation of generic generator logic. + +/// States that a generator can be in while being driven by the VM. +pub(crate) enum GeneratorState { + /// Normal execution of the generator. + Running, + + /// Generator is awaiting the result of a forced value. + AwaitingValue, +} + +/// Messages that can be sent from generators *to* the VM. In most +/// cases, the VM will suspend the generator when receiving a message +/// and enter some other frame to process the request. +/// +/// Responses are returned to generators via the [`GeneratorResponse`] type. +pub enum VMRequest { + /// Request that the VM forces this value. This message is first sent to the + /// VM with the unforced value, then returned to the generator with the + /// forced result. + ForceValue(Value), + + /// Request that the VM deep-forces the value. + DeepForceValue(Value, SharedThunkSet), + + /// Request the value at the given index from the VM's with-stack, in forced + /// state. + /// + /// The value is returned in the `ForceValue` message. + WithValue(usize), + + /// Request the value at the given index from the *captured* with-stack, in + /// forced state. + CapturedWithValue(usize), + + /// Request that the two values be compared for Nix equality. The result is + /// returned in the `ForceValue` message. + NixEquality(Box<(Value, Value)>, PointerEquality), + + /// Push the given value to the VM's stack. This is used to prepare the + /// stack for requesting a function call from the VM. + /// + /// The VM does not respond to this request, so the next message received is + /// `Empty`. + StackPush(Value), + + /// Pop a value from the stack and return it to the generator. + StackPop, + + /// Request that the VM coerces this value to a string. + StringCoerce(Value, CoercionKind), + + /// Request that the VM calls the given value, with arguments already + /// prepared on the stack. Value must already be forced. + Call(Value), + + /// Request a call frame entering the given lambda immediately. 
This can be + /// used to force thunks. + EnterLambda { + lambda: Rc<Lambda>, + upvalues: Rc<Upvalues>, + light_span: LightSpan, + }, + + /// Emit a runtime warning (already containing a span) through the VM. + EmitWarning(EvalWarning), + + /// Emit a runtime warning through the VM. The span of the current generator + /// is used for the final warning. + EmitWarningKind(WarningKind), + + /// Request a lookup in the VM's import cache, which tracks the + /// thunks yielded by previously imported files. + ImportCacheLookup(PathBuf), + + /// Provide the VM with an imported value for a given path, which + /// it can populate its input cache with. + ImportCachePut(PathBuf, Value), + + /// Request that the VM imports the given path through its I/O interface. + PathImport(PathBuf), + + /// Request that the VM reads the given path to a string. + ReadToString(PathBuf), + + /// Request that the VM checks whether the given path exists. + PathExists(PathBuf), + + /// Request that the VM reads the given path. + ReadDir(PathBuf), + + /// Request a reasonable span from the VM. + Span, + + /// Request evaluation of `builtins.tryEval` from the VM. See + /// [`VM::catch_result`] for an explanation of how this works. + TryForce(Value), + + /// Request serialisation of a value to JSON, according to the + /// slightly odd Nix evaluation rules. + ToJson(Value), +} + +/// Human-readable representation of a generator message, used by observers. +impl Display for VMRequest { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + VMRequest::ForceValue(v) => write!(f, "force_value({})", v.type_of()), + VMRequest::DeepForceValue(v, _) => { + write!(f, "deep_force_value({})", v.type_of()) + } + VMRequest::WithValue(_) => write!(f, "with_value"), + VMRequest::CapturedWithValue(_) => write!(f, "captured_with_value"), + VMRequest::NixEquality(values, ptr_eq) => { + write!( + f, + "nix_eq({}, {}, PointerEquality::{:?})", + values.0.type_of(), + values.1.type_of(), + ptr_eq + ) + } + VMRequest::StackPush(v) => write!(f, "stack_push({})", v.type_of()), + VMRequest::StackPop => write!(f, "stack_pop"), + VMRequest::StringCoerce(v, kind) => match kind { + CoercionKind::Weak => write!(f, "weak_string_coerce({})", v.type_of()), + CoercionKind::Strong => write!(f, "strong_string_coerce({})", v.type_of()), + }, + VMRequest::Call(v) => write!(f, "call({})", v), + VMRequest::EnterLambda { lambda, .. } => { + write!(f, "enter_lambda({:p})", *lambda) + } + VMRequest::EmitWarning(_) => write!(f, "emit_warning"), + VMRequest::EmitWarningKind(_) => write!(f, "emit_warning_kind"), + VMRequest::ImportCacheLookup(p) => { + write!(f, "import_cache_lookup({})", p.to_string_lossy()) + } + VMRequest::ImportCachePut(p, _) => { + write!(f, "import_cache_put({})", p.to_string_lossy()) + } + VMRequest::PathImport(p) => write!(f, "path_import({})", p.to_string_lossy()), + VMRequest::ReadToString(p) => { + write!(f, "read_to_string({})", p.to_string_lossy()) + } + VMRequest::PathExists(p) => write!(f, "path_exists({})", p.to_string_lossy()), + VMRequest::ReadDir(p) => write!(f, "read_dir({})", p.to_string_lossy()), + VMRequest::Span => write!(f, "span"), + VMRequest::TryForce(v) => write!(f, "try_force({})", v.type_of()), + VMRequest::ToJson(v) => write!(f, "to_json({})", v.type_of()), + } + } +} + +/// Responses returned to generators *from* the VM. +pub enum VMResponse { + /// Empty message. Passed to the generator as the first message, + /// or when return values were optional. 
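+    /// (e.g. in response to `StackPush` or `EmitWarning`, neither of which
+    /// produces a return value).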
+ Empty, + + /// Value produced by the VM and returned to the generator. + Value(Value), + + /// Path produced by the VM in response to some IO operation. + Path(PathBuf), + + /// VM response with the contents of a directory. + Directory(Vec<(SmolStr, FileType)>), + + /// VM response with a span to use at the current point. + Span(LightSpan), + + /// Message returned by the VM when a catchable error is encountered during + /// the evaluation of `builtins.tryEval`. + ForceError, +} + +impl Display for VMResponse { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + VMResponse::Empty => write!(f, "empty"), + VMResponse::Value(v) => write!(f, "value({})", v), + VMResponse::Path(p) => write!(f, "path({})", p.to_string_lossy()), + VMResponse::Directory(d) => write!(f, "dir(len = {})", d.len()), + VMResponse::Span(_) => write!(f, "span"), + VMResponse::ForceError => write!(f, "force_error"), + } + } +} + +pub(crate) type Generator = + Gen<VMRequest, VMResponse, Pin<Box<dyn Future<Output = Result<Value, ErrorKind>>>>>; + +/// Helper function to provide type annotations which are otherwise difficult to +/// infer. +pub fn pin_generator( + f: impl Future<Output = Result<Value, ErrorKind>> + 'static, +) -> Pin<Box<dyn Future<Output = Result<Value, ErrorKind>>>> { + Box::pin(f) +} + +impl<'o> VM<'o> { + /// Helper function to re-enqueue the current generator while it + /// is awaiting a value. + fn reenqueue_generator(&mut self, name: &'static str, span: LightSpan, generator: Generator) { + self.frames.push(Frame::Generator { + name, + generator, + span, + state: GeneratorState::AwaitingValue, + }); + } + + /// Helper function to enqueue a new generator. + pub(super) fn enqueue_generator<F, G>(&mut self, name: &'static str, span: LightSpan, gen: G) + where + F: Future<Output = Result<Value, ErrorKind>> + 'static, + G: FnOnce(GenCo) -> F, + { + self.frames.push(Frame::Generator { + name, + span, + state: GeneratorState::Running, + generator: Gen::new(|co| pin_generator(gen(co))), + }); + } + + /// Run a generator frame until it yields to the outer control loop, or runs + /// to completion. + /// + /// The return value indicates whether the generator has completed (true), + /// or was suspended (false). + pub(crate) fn run_generator( + &mut self, + name: &'static str, + span: LightSpan, + frame_id: usize, + state: GeneratorState, + mut generator: Generator, + initial_message: Option<VMResponse>, + ) -> EvalResult<bool> { + // Determine what to send to the generator based on its state. + let mut message = match (initial_message, state) { + (Some(msg), _) => msg, + (_, GeneratorState::Running) => VMResponse::Empty, + + // If control returned here, and the generator is + // awaiting a value, send it the top of the stack. + (_, GeneratorState::AwaitingValue) => VMResponse::Value(self.stack_pop()), + }; + + loop { + match generator.resume_with(message) { + // If the generator yields, it contains an instruction + // for what the VM should do. + genawaiter::GeneratorState::Yielded(request) => { + self.observer.observe_generator_request(name, &request); + + match request { + VMRequest::StackPush(value) => { + self.stack.push(value); + message = VMResponse::Empty; + } + + VMRequest::StackPop => { + message = VMResponse::Value(self.stack_pop()); + } + + // Generator has requested a force, which means that + // this function prepares the frame stack and yields + // back to the outer VM loop. 
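+                        // Note the ordering below: the suspended generator is
+                        // pushed back first and the "force" frame on top of it,
+                        // so the force completes before the generator resumes.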
+ VMRequest::ForceValue(value) => { + self.reenqueue_generator(name, span.clone(), generator); + self.enqueue_generator("force", span.clone(), |co| { + value.force(co, span) + }); + return Ok(false); + } + + // Generator has requested a deep-force. + VMRequest::DeepForceValue(value, thunk_set) => { + self.reenqueue_generator(name, span.clone(), generator); + self.enqueue_generator("deep_force", span, |co| { + value.deep_force(co, thunk_set) + }); + return Ok(false); + } + + // Generator has requested a value from the with-stack. + // Logic is similar to `ForceValue`, except with the + // value being taken from that stack. + VMRequest::WithValue(idx) => { + self.reenqueue_generator(name, span.clone(), generator); + + let value = self.stack[self.with_stack[idx]].clone(); + self.enqueue_generator("force", span.clone(), |co| { + value.force(co, span) + }); + + return Ok(false); + } + + // Generator has requested a value from the *captured* + // with-stack. Logic is same as above, except for the + // value being from that stack. + VMRequest::CapturedWithValue(idx) => { + self.reenqueue_generator(name, span.clone(), generator); + + let call_frame = self.last_call_frame() + .expect("Tvix bug: generator requested captured with-value, but there is no call frame"); + + let value = call_frame.upvalues.with_stack().unwrap()[idx].clone(); + self.enqueue_generator("force", span.clone(), |co| { + value.force(co, span) + }); + + return Ok(false); + } + + VMRequest::NixEquality(values, ptr_eq) => { + let values = *values; + self.reenqueue_generator(name, span.clone(), generator); + self.enqueue_generator("nix_eq", span, |co| { + values.0.nix_eq(values.1, co, ptr_eq) + }); + return Ok(false); + } + + VMRequest::StringCoerce(val, kind) => { + self.reenqueue_generator(name, span.clone(), generator); + self.enqueue_generator("coerce_to_string", span, |co| { + val.coerce_to_string(co, kind) + }); + return Ok(false); + } + + VMRequest::Call(callable) => { + self.reenqueue_generator(name, span.clone(), generator); + self.call_value(span, None, callable)?; + return Ok(false); + } + + VMRequest::EnterLambda { + lambda, + upvalues, + light_span, + } => { + self.reenqueue_generator(name, span, generator); + + self.frames.push(Frame::CallFrame { + span: light_span, + call_frame: CallFrame { + lambda, + upvalues, + ip: CodeIdx(0), + stack_offset: self.stack.len(), + }, + }); + + return Ok(false); + } + + VMRequest::EmitWarning(warning) => { + self.push_warning(warning); + message = VMResponse::Empty; + } + + VMRequest::EmitWarningKind(kind) => { + self.emit_warning(kind); + message = VMResponse::Empty; + } + + VMRequest::ImportCacheLookup(path) => { + if let Some(cached) = self.import_cache.get(&path) { + message = VMResponse::Value(cached.clone()); + } else { + message = VMResponse::Empty; + } + } + + VMRequest::ImportCachePut(path, value) => { + self.import_cache.insert(path, value); + message = VMResponse::Empty; + } + + VMRequest::PathImport(path) => { + let imported = self + .io_handle + .import_path(&path) + .map_err(|e| ErrorKind::IO { + path: Some(path), + error: e.into(), + }) + .with_span(&span, self)?; + + message = VMResponse::Path(imported); + } + + VMRequest::ReadToString(path) => { + let content = self + .io_handle + .read_to_string(&path) + .map_err(|e| ErrorKind::IO { + path: Some(path), + error: e.into(), + }) + .with_span(&span, self)?; + + message = VMResponse::Value(Value::String(content.into())) + } + + VMRequest::PathExists(path) => { + let exists = self + .io_handle + .path_exists(&path) + 
.map_err(|e| ErrorKind::IO { + path: Some(path), + error: e.into(), + }) + .map(Value::Bool) + .with_span(&span, self)?; + + message = VMResponse::Value(exists); + } + + VMRequest::ReadDir(path) => { + let dir = self + .io_handle + .read_dir(&path) + .map_err(|e| ErrorKind::IO { + path: Some(path), + error: e.into(), + }) + .with_span(&span, self)?; + message = VMResponse::Directory(dir); + } + + VMRequest::Span => { + message = VMResponse::Span(self.reasonable_light_span()); + } + + VMRequest::TryForce(value) => { + self.try_eval_frames.push(frame_id); + self.reenqueue_generator(name, span.clone(), generator); + + debug_assert!( + self.frames.len() == frame_id + 1, + "generator should be reenqueued with the same frame ID" + ); + + self.enqueue_generator("force", span.clone(), |co| { + value.force(co, span) + }); + return Ok(false); + } + + VMRequest::ToJson(value) => { + self.reenqueue_generator(name, span.clone(), generator); + self.enqueue_generator("to_json", span, |co| { + value.to_json_generator(co) + }); + return Ok(false); + } + } + } + + // Generator has completed, and its result value should + // be left on the stack. + genawaiter::GeneratorState::Complete(result) => { + let value = result.with_span(&span, self)?; + self.stack.push(value); + return Ok(true); + } + } + } + } +} + +pub type GenCo = Co<VMRequest, VMResponse>; + +// -- Implementation of concrete generator use-cases. + +/// Request that the VM place the given value on its stack. +pub async fn request_stack_push(co: &GenCo, val: Value) { + match co.yield_(VMRequest::StackPush(val)).await { + VMResponse::Empty => {} + msg => panic!( + "Tvix bug: VM responded with incorrect generator message: {}", + msg + ), + } +} + +/// Request that the VM pop a value from the stack and return it to the +/// generator. +pub async fn request_stack_pop(co: &GenCo) -> Value { + match co.yield_(VMRequest::StackPop).await { + VMResponse::Value(value) => value, + msg => panic!( + "Tvix bug: VM responded with incorrect generator message: {}", + msg + ), + } +} + +/// Force any value and return the evaluated result from the VM. +pub async fn request_force(co: &GenCo, val: Value) -> Value { + if let Value::Thunk(_) = val { + match co.yield_(VMRequest::ForceValue(val)).await { + VMResponse::Value(value) => value, + msg => panic!( + "Tvix bug: VM responded with incorrect generator message: {}", + msg + ), + } + } else { + val + } +} + +/// Force a value, but inform the caller (by returning `None`) if a catchable +/// error occured. +pub(crate) async fn request_try_force(co: &GenCo, val: Value) -> Option<Value> { + if let Value::Thunk(_) = val { + match co.yield_(VMRequest::TryForce(val)).await { + VMResponse::Value(value) => Some(value), + VMResponse::ForceError => None, + msg => panic!( + "Tvix bug: VM responded with incorrect generator message: {}", + msg + ), + } + } else { + Some(val) + } +} + +/// Call the given value as a callable. The argument(s) must already be prepared +/// on the stack. +pub async fn request_call(co: &GenCo, val: Value) -> Value { + let val = request_force(co, val).await; + match co.yield_(VMRequest::Call(val)).await { + VMResponse::Value(value) => value, + msg => panic!( + "Tvix bug: VM responded with incorrect generator message: {}", + msg + ), + } +} + +/// Helper function to call the given value with the provided list of arguments. +/// This uses the StackPush and Call messages under the hood. 
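+///
+/// A hedged sketch of a caller (hypothetical builtin code, for illustration):
+///
+/// ```ignore
+/// // Apply a user-supplied two-argument function to `a` and `b`.
+/// let result = request_call_with(&co, func, [a, b]).await;
+/// ```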
+pub async fn request_call_with<I>(co: &GenCo, mut callable: Value, args: I) -> Value +where + I: IntoIterator<Item = Value>, + I::IntoIter: DoubleEndedIterator, +{ + let mut num_args = 0_usize; + for arg in args.into_iter().rev() { + num_args += 1; + request_stack_push(co, arg).await; + } + + debug_assert!(num_args > 0, "call_with called with an empty list of args"); + + while num_args > 0 { + callable = request_call(co, callable).await; + num_args -= 1; + } + + callable +} + +pub async fn request_string_coerce(co: &GenCo, val: Value, kind: CoercionKind) -> NixString { + match val { + Value::String(s) => s, + _ => match co.yield_(VMRequest::StringCoerce(val, kind)).await { + VMResponse::Value(value) => value + .to_str() + .expect("coerce_to_string always returns a string"), + msg => panic!( + "Tvix bug: VM responded with incorrect generator message: {}", + msg + ), + }, + } +} + +/// Deep-force any value and return the evaluated result from the VM. +pub async fn request_deep_force(co: &GenCo, val: Value, thunk_set: SharedThunkSet) -> Value { + match co.yield_(VMRequest::DeepForceValue(val, thunk_set)).await { + VMResponse::Value(value) => value, + msg => panic!( + "Tvix bug: VM responded with incorrect generator message: {}", + msg + ), + } +} + +/// Ask the VM to compare two values for equality. +pub(crate) async fn check_equality( + co: &GenCo, + a: Value, + b: Value, + ptr_eq: PointerEquality, +) -> Result<bool, ErrorKind> { + match co + .yield_(VMRequest::NixEquality(Box::new((a, b)), ptr_eq)) + .await + { + VMResponse::Value(value) => value.as_bool(), + msg => panic!( + "Tvix bug: VM responded with incorrect generator message: {}", + msg + ), + } +} + +/// Emit a fully constructed runtime warning. +pub(crate) async fn emit_warning(co: &GenCo, warning: EvalWarning) { + match co.yield_(VMRequest::EmitWarning(warning)).await { + VMResponse::Empty => {} + msg => panic!( + "Tvix bug: VM responded with incorrect generator message: {}", + msg + ), + } +} + +/// Emit a runtime warning with the span of the current generator. +pub(crate) async fn emit_warning_kind(co: &GenCo, kind: WarningKind) { + match co.yield_(VMRequest::EmitWarningKind(kind)).await { + VMResponse::Empty => {} + msg => panic!( + "Tvix bug: VM responded with incorrect generator message: {}", + msg + ), + } +} + +/// Request that the VM enter the given lambda. +pub(crate) async fn request_enter_lambda( + co: &GenCo, + lambda: Rc<Lambda>, + upvalues: Rc<Upvalues>, + light_span: LightSpan, +) -> Value { + let msg = VMRequest::EnterLambda { + lambda, + upvalues, + light_span, + }; + + match co.yield_(msg).await { + VMResponse::Value(value) => value, + msg => panic!( + "Tvix bug: VM responded with incorrect generator message: {}", + msg + ), + } +} + +/// Request a lookup in the VM's import cache. +pub(crate) async fn request_import_cache_lookup(co: &GenCo, path: PathBuf) -> Option<Value> { + match co.yield_(VMRequest::ImportCacheLookup(path)).await { + VMResponse::Value(value) => Some(value), + VMResponse::Empty => None, + msg => panic!( + "Tvix bug: VM responded with incorrect generator message: {}", + msg + ), + } +} + +/// Request that the VM populate its input cache for the given path. +pub(crate) async fn request_import_cache_put(co: &GenCo, path: PathBuf, value: Value) { + match co.yield_(VMRequest::ImportCachePut(path, value)).await { + VMResponse::Empty => {} + msg => panic!( + "Tvix bug: VM responded with incorrect generator message: {}", + msg + ), + } +} + +/// Request that the VM import the given path. 
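+/// The request is answered through the VM's `EvalIO` handle, so the exact
+/// behaviour depends on the configured I/O implementation.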
+pub(crate) async fn request_path_import(co: &GenCo, path: PathBuf) -> PathBuf { + match co.yield_(VMRequest::PathImport(path)).await { + VMResponse::Path(path) => path, + msg => panic!( + "Tvix bug: VM responded with incorrect generator message: {}", + msg + ), + } +} + +pub(crate) async fn request_read_to_string(co: &GenCo, path: PathBuf) -> Value { + match co.yield_(VMRequest::ReadToString(path)).await { + VMResponse::Value(value) => value, + msg => panic!( + "Tvix bug: VM responded with incorrect generator message: {}", + msg + ), + } +} + +pub(crate) async fn request_path_exists(co: &GenCo, path: PathBuf) -> Value { + match co.yield_(VMRequest::PathExists(path)).await { + VMResponse::Value(value) => value, + msg => panic!( + "Tvix bug: VM responded with incorrect generator message: {}", + msg + ), + } +} + +pub(crate) async fn request_read_dir(co: &GenCo, path: PathBuf) -> Vec<(SmolStr, FileType)> { + match co.yield_(VMRequest::ReadDir(path)).await { + VMResponse::Directory(dir) => dir, + msg => panic!( + "Tvix bug: VM responded with incorrect generator message: {}", + msg + ), + } +} + +pub(crate) async fn request_span(co: &GenCo) -> LightSpan { + match co.yield_(VMRequest::Span).await { + VMResponse::Span(span) => span, + msg => panic!( + "Tvix bug: VM responded with incorrect generator message: {}", + msg + ), + } +} + +pub(crate) async fn request_to_json(co: &GenCo, value: Value) -> serde_json::Value { + match co.yield_(VMRequest::ToJson(value)).await { + VMResponse::Value(Value::Json(json)) => json, + msg => panic!( + "Tvix bug: VM responded with incorrect generator message: {}", + msg + ), + } +} + +/// Call the given value as if it was an attribute set containing a functor. The +/// arguments must already be prepared on the stack when a generator frame from +/// this function is invoked. +/// +pub(crate) async fn call_functor(co: GenCo, value: Value) -> Result<Value, ErrorKind> { + let attrs = value.to_attrs()?; + + match attrs.select("__functor") { + None => Err(ErrorKind::NotCallable("set without `__functor_` attribute")), + Some(functor) => { + // The functor receives the set itself as its first argument and + // needs to be called with it. + let functor = request_force(&co, functor.clone()).await; + let primed = request_call_with(&co, functor, [value]).await; + Ok(request_call(&co, primed).await) + } + } +} diff --git a/tvix/eval/src/vm/macros.rs b/tvix/eval/src/vm/macros.rs new file mode 100644 index 000000000000..969b50807766 --- /dev/null +++ b/tvix/eval/src/vm/macros.rs @@ -0,0 +1,70 @@ +/// This module provides macros which are used in the implementation +/// of the VM for the implementation of repetitive operations. + +/// This macro simplifies the implementation of arithmetic operations, +/// correctly handling the behaviour on different pairings of number +/// types. +#[macro_export] +macro_rules! 
arithmetic_op { + ( $self:ident, $op:tt ) => {{ // TODO: remove + let b = $self.pop(); + let a = $self.pop(); + let result = fallible!($self, arithmetic_op!(&a, &b, $op)); + $self.push(result); + }}; + + ( $a:expr, $b:expr, $op:tt ) => {{ + match ($a, $b) { + (Value::Integer(i1), Value::Integer(i2)) => Ok(Value::Integer(i1 $op i2)), + (Value::Float(f1), Value::Float(f2)) => Ok(Value::Float(f1 $op f2)), + (Value::Integer(i1), Value::Float(f2)) => Ok(Value::Float(*i1 as f64 $op f2)), + (Value::Float(f1), Value::Integer(i2)) => Ok(Value::Float(f1 $op *i2 as f64)), + + (v1, v2) => Err(ErrorKind::TypeError { + expected: "number (either int or float)", + actual: if v1.is_number() { + v2.type_of() + } else { + v1.type_of() + }, + }), + } + }}; +} + +/// This macro simplifies the implementation of comparison operations. +#[macro_export] +macro_rules! cmp_op { + ( $vm:ident, $frame:ident, $span:ident, $op:tt ) => {{ + let b = $vm.stack_pop(); + let a = $vm.stack_pop(); + + async fn compare(a: Value, b: Value, co: GenCo) -> Result<Value, ErrorKind> { + let a = generators::request_force(&co, a).await; + let b = generators::request_force(&co, b).await; + let ordering = a.nix_cmp_ordering(b, co).await?; + Ok(Value::Bool(cmp_op!(@order $op ordering))) + } + + let gen_span = $frame.current_light_span(); + $vm.push_call_frame($span, $frame); + $vm.enqueue_generator("compare", gen_span, |co| compare(a, b, co)); + return Ok(false); + }}; + + (@order < $ordering:expr) => { + $ordering == Some(Ordering::Less) + }; + + (@order > $ordering:expr) => { + $ordering == Some(Ordering::Greater) + }; + + (@order <= $ordering:expr) => { + !matches!($ordering, None | Some(Ordering::Greater)) + }; + + (@order >= $ordering:expr) => { + !matches!($ordering, None | Some(Ordering::Less)) + }; +} diff --git a/tvix/eval/src/vm/mod.rs b/tvix/eval/src/vm/mod.rs new file mode 100644 index 000000000000..4af23a72d73b --- /dev/null +++ b/tvix/eval/src/vm/mod.rs @@ -0,0 +1,1235 @@ +//! This module implements the abstract/virtual machine that runs Tvix +//! bytecode. +//! +//! The operation of the VM is facilitated by the [`Frame`] type, +//! which controls the current execution state of the VM and is +//! processed within the VM's operating loop. +//! +//! A [`VM`] is used by instantiating it with an initial [`Frame`], +//! then triggering its execution and waiting for the VM to return or +//! yield an error. + +pub mod generators; +mod macros; + +use codemap::Span; +use serde_json::json; +use std::{cmp::Ordering, collections::HashMap, ops::DerefMut, path::PathBuf, rc::Rc}; + +use crate::{ + arithmetic_op, + chunk::Chunk, + cmp_op, + compiler::GlobalsMap, + errors::{Error, ErrorKind, EvalResult}, + io::EvalIO, + nix_search_path::NixSearchPath, + observer::RuntimeObserver, + opcode::{CodeIdx, Count, JumpOffset, OpCode, StackIdx, UpvalueIdx}, + spans::LightSpan, + upvalues::Upvalues, + value::{ + Builtin, BuiltinResult, Closure, CoercionKind, Lambda, NixAttrs, NixList, PointerEquality, + SharedThunkSet, Thunk, Value, + }, + vm::generators::GenCo, + warnings::{EvalWarning, WarningKind}, +}; + +use generators::{call_functor, Generator, GeneratorState}; + +use self::generators::{VMRequest, VMResponse}; + +/// Internal helper trait for taking a span from a variety of types, to make use +/// of `WithSpan` (defined below) more ergonomic at call sites. 
+trait GetSpan { + fn get_span(self) -> Span; +} + +impl<'o> GetSpan for &VM<'o> { + fn get_span(self) -> Span { + self.reasonable_span.span() + } +} + +impl GetSpan for &CallFrame { + fn get_span(self) -> Span { + self.current_span() + } +} + +impl GetSpan for &LightSpan { + fn get_span(self) -> Span { + self.span() + } +} + +impl GetSpan for Span { + fn get_span(self) -> Span { + self + } +} + +/// Internal helper trait for ergonomically converting from a `Result<T, +/// ErrorKind>` to a `Result<T, Error>` using the current span of a call frame, +/// and chaining the VM's frame stack around it for printing a cause chain. +trait WithSpan<T, S: GetSpan> { + fn with_span(self, top_span: S, vm: &VM) -> Result<T, Error>; +} + +impl<T, S: GetSpan> WithSpan<T, S> for Result<T, ErrorKind> { + fn with_span(self, top_span: S, vm: &VM) -> Result<T, Error> { + match self { + Ok(something) => Ok(something), + Err(kind) => { + let mut error = Error::new(kind, top_span.get_span()); + + // Short-circuit the wrapping if we're dealing with tryEval, in + // which case the error is hidden and does not need to be + // exhaustive. + if !vm.try_eval_frames.is_empty() && error.kind.is_catchable() { + return Err(error); + } + + // Wrap the top-level error in chaining errors for each element + // of the frame stack. + for frame in vm.frames.iter().rev() { + match frame { + Frame::CallFrame { span, .. } => { + error = + Error::new(ErrorKind::BytecodeError(Box::new(error)), span.span()); + } + Frame::Generator { name, span, .. } => { + error = Error::new( + ErrorKind::NativeError { + err: Box::new(error), + gen_type: name, + }, + span.span(), + ); + } + } + } + + Err(error) + } + } + } +} + +struct CallFrame { + /// The lambda currently being executed. + lambda: Rc<Lambda>, + + /// Optional captured upvalues of this frame (if a thunk or + /// closure if being evaluated). + upvalues: Rc<Upvalues>, + + /// Instruction pointer to the instruction currently being + /// executed. + ip: CodeIdx, + + /// Stack offset, i.e. the frames "view" into the VM's full stack. + stack_offset: usize, +} + +impl CallFrame { + /// Retrieve an upvalue from this frame at the given index. + fn upvalue(&self, idx: UpvalueIdx) -> &Value { + &self.upvalues[idx] + } + + /// Borrow the chunk of this frame's lambda. + fn chunk(&self) -> &Chunk { + &self.lambda.chunk + } + + /// Increment this frame's instruction pointer and return the operation that + /// the pointer moved past. + fn inc_ip(&mut self) -> OpCode { + let op = self.chunk()[self.ip]; + self.ip += 1; + op + } + + /// Construct an error result from the given ErrorKind and the source span + /// of the current instruction. + pub fn error<T>(&self, vm: &VM, kind: ErrorKind) -> Result<T, Error> { + Err(kind).with_span(self, vm) + } + + /// Returns the current span. This is potentially expensive and should only + /// be used when actually constructing an error or warning. + pub fn current_span(&self) -> Span { + self.chunk().get_span(self.ip - 1) + } + + /// Returns the information needed to calculate the current span, + /// but without performing that calculation. + // TODO: why pub? + pub(crate) fn current_light_span(&self) -> LightSpan { + LightSpan::new_delayed(self.lambda.clone(), self.ip - 1) + } +} + +/// A frame represents an execution state of the VM. The VM has a stack of +/// frames representing the nesting of execution inside of the VM, and operates +/// on the frame at the top. 
+/// +/// When a frame has been fully executed, it is removed from the VM's frame +/// stack and expected to leave a result [`Value`] on the top of the stack. +enum Frame { + /// CallFrame represents the execution of Tvix bytecode within a thunk, + /// function or closure. + CallFrame { + /// The call frame itself, separated out into another type to pass it + /// around easily. + call_frame: CallFrame, + + /// Span from which the call frame was launched. + span: LightSpan, + }, + + /// Generator represents a frame that can yield further + /// instructions to the VM while its execution is being driven. + /// + /// A generator is essentially an asynchronous function that can + /// be suspended while waiting for the VM to do something (e.g. + /// thunk forcing), and resume at the same point. + Generator { + /// human-readable description of the generator, + name: &'static str, + + /// Span from which the generator was launched. + span: LightSpan, + + state: GeneratorState, + + /// Generator itself, which can be resumed with `.resume()`. + generator: Generator, + }, +} + +impl Frame { + pub fn span(&self) -> LightSpan { + match self { + Frame::CallFrame { span, .. } | Frame::Generator { span, .. } => span.clone(), + } + } +} + +struct VM<'o> { + /// VM's frame stack, representing the execution contexts the VM is working + /// through. Elements are usually pushed when functions are called, or + /// thunks are being forced. + frames: Vec<Frame>, + + /// The VM's top-level value stack. Within this stack, each code-executing + /// frame holds a "view" of the stack representing the slice of the + /// top-level stack that is relevant to its operation. This is done to avoid + /// allocating a new `Vec` for each frame's stack. + pub(crate) stack: Vec<Value>, + + /// Stack indices (absolute indexes into `stack`) of attribute + /// sets from which variables should be dynamically resolved + /// (`with`). + with_stack: Vec<usize>, + + /// Runtime warnings collected during evaluation. + warnings: Vec<EvalWarning>, + + /// Import cache, mapping absolute file paths to the value that + /// they compile to. Note that this reuses thunks, too! + // TODO: should probably be based on a file hash + pub import_cache: Box<HashMap<PathBuf, Value>>, + + /// Parsed Nix search path, which is used to resolve `<...>` + /// references. + nix_search_path: NixSearchPath, + + /// Implementation of I/O operations used for impure builtins and + /// features like `import`. + io_handle: Box<dyn EvalIO>, + + /// Runtime observer which can print traces of runtime operations. + observer: &'o mut dyn RuntimeObserver, + + /// Strong reference to the globals, guaranteeing that they are + /// kept alive for the duration of evaluation. + /// + /// This is important because recursive builtins (specifically + /// `import`) hold a weak reference to the builtins, while the + /// original strong reference is held by the compiler which does + /// not exist anymore at runtime. + #[allow(dead_code)] + globals: Rc<GlobalsMap>, + + /// A reasonably applicable span that can be used for errors in each + /// execution situation. + /// + /// The VM should update this whenever control flow changes take place (i.e. + /// entering or exiting a frame to yield control somewhere). + reasonable_span: LightSpan, + + /// This field is responsible for handling `builtins.tryEval`. When that + /// builtin is encountered, it sends a special message to the VM which + /// pushes the frame index that requested to be informed of catchable + /// errors in this field. 
+ /// + /// The frame stack is then laid out like this: + /// + /// ```notrust + /// ┌──┬──────────────────────────┐ + /// │ 0│ `Result`-producing frame │ + /// ├──┼──────────────────────────┤ + /// │-1│ `builtins.tryEval` frame │ + /// ├──┼──────────────────────────┤ + /// │..│ ... other frames ... │ + /// └──┴──────────────────────────┘ + /// ``` + /// + /// Control is yielded to the outer VM loop, which evaluates the next frame + /// and returns the result itself to the `builtins.tryEval` frame. + try_eval_frames: Vec<usize>, +} + +impl<'o> VM<'o> { + pub fn new( + nix_search_path: NixSearchPath, + io_handle: Box<dyn EvalIO>, + observer: &'o mut dyn RuntimeObserver, + globals: Rc<GlobalsMap>, + reasonable_span: LightSpan, + ) -> Self { + Self { + nix_search_path, + io_handle, + observer, + globals, + reasonable_span, + frames: vec![], + stack: vec![], + with_stack: vec![], + warnings: vec![], + import_cache: Default::default(), + try_eval_frames: vec![], + } + } + + /// Push a call frame onto the frame stack. + fn push_call_frame(&mut self, span: LightSpan, call_frame: CallFrame) { + self.frames.push(Frame::CallFrame { span, call_frame }) + } + + /// Run the VM's primary (outer) execution loop, continuing execution based + /// on the current frame at the top of the frame stack. + fn execute(mut self) -> EvalResult<RuntimeResult> { + let mut catchable_error_occurred = false; + + while let Some(frame) = self.frames.pop() { + self.reasonable_span = frame.span(); + let frame_id = self.frames.len(); + + match frame { + Frame::CallFrame { call_frame, span } => { + self.observer + .observe_enter_call_frame(0, &call_frame.lambda, frame_id); + + match self.execute_bytecode(span, call_frame) { + Ok(true) => self.observer.observe_exit_call_frame(frame_id, &self.stack), + Ok(false) => self + .observer + .observe_suspend_call_frame(frame_id, &self.stack), + + Err(err) => { + if let Some(catching_frame_idx) = self.try_eval_frames.pop() { + if err.kind.is_catchable() { + self.observer.observe_exit_call_frame(frame_id, &self.stack); + catchable_error_occurred = true; + + // truncate the frame stack back to the + // frame that can catch this error + self.frames.truncate(/* len = */ catching_frame_idx + 1); + continue; + } + } + + return Err(err); + } + }; + } + + // Handle generator frames, which can request thunk forcing + // during their execution. + Frame::Generator { + name, + span, + state, + generator, + } => { + self.observer + .observe_enter_generator(frame_id, name, &self.stack); + + let initial_msg = if catchable_error_occurred { + catchable_error_occurred = false; + Some(VMResponse::ForceError) + } else { + None + }; + + match self.run_generator(name, span, frame_id, state, generator, initial_msg) { + Ok(true) => { + self.observer + .observe_exit_generator(frame_id, name, &self.stack) + } + Ok(false) => { + self.observer + .observe_suspend_generator(frame_id, name, &self.stack) + } + + Err(err) => { + if let Some(catching_frame_idx) = self.try_eval_frames.pop() { + if err.kind.is_catchable() { + self.observer.observe_exit_generator( + frame_id, + name, + &self.stack, + ); + catchable_error_occurred = true; + + // truncate the frame stack back to the + // frame that can catch this error + self.frames.truncate(/* len = */ catching_frame_idx + 1); + continue; + } + } + + return Err(err); + } + }; + } + } + } + + // Once no more frames are present, return the stack's top value as the + // result. 
+ Ok(RuntimeResult { + value: self + .stack + .pop() + .expect("tvix bug: runtime stack empty after execution"), + + warnings: self.warnings, + }) + } + + /// Run the VM's inner execution loop, processing Tvix bytecode from a + /// chunk. This function returns if: + /// + /// 1. The code has run to the end, and has left a value on the top of the + /// stack. In this case, the frame is not returned to the frame stack. + /// + /// 2. The code encounters a generator, in which case the frame in its + /// current state is pushed back on the stack, and the generator is left on + /// top of it for the outer loop to execute. + /// + /// 3. An error is encountered. + /// + /// This function *must* ensure that it leaves the frame stack in the + /// correct order, especially when re-enqueuing a frame to execute. + /// + /// The return value indicates whether the bytecode has been executed to + /// completion, or whether it has been suspended in favour of a generator. + fn execute_bytecode(&mut self, span: LightSpan, mut frame: CallFrame) -> EvalResult<bool> { + loop { + let op = frame.inc_ip(); + self.observer.observe_execute_op(frame.ip, &op, &self.stack); + + match op { + OpCode::OpThunkSuspended(idx) | OpCode::OpThunkClosure(idx) => { + let blueprint = match &frame.chunk()[idx] { + Value::Blueprint(lambda) => lambda.clone(), + _ => panic!("compiler bug: non-blueprint in blueprint slot"), + }; + + let upvalue_count = blueprint.upvalue_count; + let thunk = if matches!(op, OpCode::OpThunkClosure(_)) { + debug_assert!( + upvalue_count > 0, + "OpThunkClosure should not be called for plain lambdas" + ); + Thunk::new_closure(blueprint) + } else { + Thunk::new_suspended(blueprint, frame.current_light_span()) + }; + let upvalues = thunk.upvalues_mut(); + self.stack.push(Value::Thunk(thunk.clone())); + + // From this point on we internally mutate the + // upvalues. The closure (if `is_closure`) is + // already in its stack slot, which means that it + // can capture itself as an upvalue for + // self-recursion. + self.populate_upvalues(&mut frame, upvalue_count, upvalues)?; + } + + OpCode::OpForce => { + if let Some(Value::Thunk(_)) = self.stack.last() { + let thunk = match self.stack_pop() { + Value::Thunk(t) => t, + _ => unreachable!(), + }; + + let gen_span = frame.current_light_span(); + + self.push_call_frame(span, frame); + self.enqueue_generator("force", gen_span.clone(), |co| { + thunk.force(co, gen_span) + }); + + return Ok(false); + } + } + + OpCode::OpGetUpvalue(upv_idx) => { + let value = frame.upvalue(upv_idx).clone(); + self.stack.push(value); + } + + // Discard the current frame. + OpCode::OpReturn => { + return Ok(true); + } + + OpCode::OpConstant(idx) => { + let c = frame.chunk()[idx].clone(); + self.stack.push(c); + } + + OpCode::OpCall => { + let callable = self.stack_pop(); + self.call_value(frame.current_light_span(), Some((span, frame)), callable)?; + + // exit this loop and let the outer loop enter the new call + return Ok(true); + } + + // Remove the given number of elements from the stack, + // but retain the top value. + OpCode::OpCloseScope(Count(count)) => { + // Immediately move the top value into the right + // position. + let target_idx = self.stack.len() - 1 - count; + self.stack[target_idx] = self.stack_pop(); + + // Then drop the remaining values. 
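+                    // (e.g. with count == 2 and a stack of [.., a, b, r]: r
+                    // first overwrites a, then b is popped, leaving [.., r]).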
+ for _ in 0..(count - 1) { + self.stack.pop(); + } + } + + OpCode::OpClosure(idx) => { + let blueprint = match &frame.chunk()[idx] { + Value::Blueprint(lambda) => lambda.clone(), + _ => panic!("compiler bug: non-blueprint in blueprint slot"), + }; + + let upvalue_count = blueprint.upvalue_count; + debug_assert!( + upvalue_count > 0, + "OpClosure should not be called for plain lambdas" + ); + + let mut upvalues = Upvalues::with_capacity(blueprint.upvalue_count); + self.populate_upvalues(&mut frame, upvalue_count, &mut upvalues)?; + self.stack + .push(Value::Closure(Rc::new(Closure::new_with_upvalues( + Rc::new(upvalues), + blueprint, + )))); + } + + OpCode::OpAttrsSelect => { + let key = self.stack_pop().to_str().with_span(&frame, self)?; + let attrs = self.stack_pop().to_attrs().with_span(&frame, self)?; + + match attrs.select(key.as_str()) { + Some(value) => self.stack.push(value.clone()), + + None => { + return frame.error( + self, + ErrorKind::AttributeNotFound { + name: key.as_str().to_string(), + }, + ); + } + } + } + + OpCode::OpJumpIfFalse(JumpOffset(offset)) => { + debug_assert!(offset != 0); + if !self.stack_peek(0).as_bool().with_span(&frame, self)? { + frame.ip += offset; + } + } + + OpCode::OpPop => { + self.stack.pop(); + } + + OpCode::OpAttrsTrySelect => { + let key = self.stack_pop().to_str().with_span(&frame, self)?; + let value = match self.stack_pop() { + Value::Attrs(attrs) => match attrs.select(key.as_str()) { + Some(value) => value.clone(), + None => Value::AttrNotFound, + }, + + _ => Value::AttrNotFound, + }; + + self.stack.push(value); + } + + OpCode::OpGetLocal(StackIdx(local_idx)) => { + let idx = frame.stack_offset + local_idx; + self.stack.push(self.stack[idx].clone()); + } + + OpCode::OpJumpIfNotFound(JumpOffset(offset)) => { + debug_assert!(offset != 0); + if matches!(self.stack_peek(0), Value::AttrNotFound) { + self.stack_pop(); + frame.ip += offset; + } + } + + OpCode::OpJump(JumpOffset(offset)) => { + debug_assert!(offset != 0); + frame.ip += offset; + } + + OpCode::OpEqual => { + let b = self.stack_pop(); + let a = self.stack_pop(); + let gen_span = frame.current_light_span(); + self.push_call_frame(span, frame); + self.enqueue_generator("nix_eq", gen_span, |co| { + a.nix_eq(b, co, PointerEquality::ForbidAll) + }); + return Ok(false); + } + + // These assertion operations error out if the stack + // top is not of the expected type. This is necessary + // to implement some specific behaviours of Nix + // exactly. + OpCode::OpAssertBool => { + let val = self.stack_peek(0); + if !val.is_bool() { + return frame.error( + self, + ErrorKind::TypeError { + expected: "bool", + actual: val.type_of(), + }, + ); + } + } + + OpCode::OpAttrs(Count(count)) => self.run_attrset(&frame, count)?, + + OpCode::OpAttrsUpdate => { + let rhs = self.stack_pop().to_attrs().with_span(&frame, self)?; + let lhs = self.stack_pop().to_attrs().with_span(&frame, self)?; + + self.stack.push(Value::attrs(lhs.update(*rhs))) + } + + OpCode::OpInvert => { + let v = self.stack_pop().as_bool().with_span(&frame, self)?; + self.stack.push(Value::Bool(!v)); + } + + OpCode::OpList(Count(count)) => { + let list = + NixList::construct(count, self.stack.split_off(self.stack.len() - count)); + + self.stack.push(Value::List(list)); + } + + OpCode::OpJumpIfTrue(JumpOffset(offset)) => { + debug_assert!(offset != 0); + if self.stack_peek(0).as_bool().with_span(&frame, self)? 
{ + frame.ip += offset; + } + } + + OpCode::OpHasAttr => { + let key = self.stack_pop().to_str().with_span(&frame, self)?; + let result = match self.stack_pop() { + Value::Attrs(attrs) => attrs.contains(key.as_str()), + + // Nix allows use of `?` on non-set types, but + // always returns false in those cases. + _ => false, + }; + + self.stack.push(Value::Bool(result)); + } + + OpCode::OpConcat => { + let rhs = self + .stack_pop() + .to_list() + .with_span(&frame, self)? + .into_inner(); + let lhs = self + .stack_pop() + .to_list() + .with_span(&frame, self)? + .into_inner(); + self.stack.push(Value::List(NixList::from(lhs + rhs))) + } + + OpCode::OpResolveWith => { + let ident = self.stack_pop().to_str().with_span(&frame, self)?; + + // Re-enqueue this frame. + let op_span = frame.current_light_span(); + self.push_call_frame(span, frame); + + // Construct a generator frame doing the lookup in constant + // stack space. + let with_stack_len = self.with_stack.len(); + let closed_with_stack_len = self + .last_call_frame() + .map(|frame| frame.upvalues.with_stack_len()) + .unwrap_or(0); + + self.enqueue_generator("resolve_with", op_span, |co| { + resolve_with( + co, + ident.as_str().to_owned(), + with_stack_len, + closed_with_stack_len, + ) + }); + + return Ok(false); + } + + OpCode::OpFinalise(StackIdx(idx)) => { + match &self.stack[frame.stack_offset + idx] { + Value::Closure(_) => panic!("attempted to finalise a closure"), + Value::Thunk(thunk) => thunk.finalise(&self.stack[frame.stack_offset..]), + + // In functions with "formals" attributes, it is + // possible for `OpFinalise` to be called on a + // non-capturing value, in which case it is a no-op. + // + // TODO: detect this in some phase and skip the finalise; fail here + _ => { /* TODO: panic here again to catch bugs */ } + } + } + + OpCode::OpCoerceToString => { + let value = self.stack_pop(); + let gen_span = frame.current_light_span(); + self.push_call_frame(span, frame); + + self.enqueue_generator("coerce_to_string", gen_span, |co| { + value.coerce_to_string(co, CoercionKind::Weak) + }); + + return Ok(false); + } + + OpCode::OpInterpolate(Count(count)) => self.run_interpolate(&frame, count)?, + + OpCode::OpValidateClosedFormals => { + let formals = frame.lambda.formals.as_ref().expect( + "OpValidateClosedFormals called within the frame of a lambda without formals", + ); + + let args = self.stack_peek(0).to_attrs().with_span(&frame, self)?; + for arg in args.keys() { + if !formals.contains(arg) { + return frame.error( + self, + ErrorKind::UnexpectedArgument { + arg: arg.clone(), + formals_span: formals.span, + }, + ); + } + } + } + + OpCode::OpAdd => { + let b = self.stack_pop(); + let a = self.stack_pop(); + + let gen_span = frame.current_light_span(); + self.push_call_frame(span, frame); + + // OpAdd can add not just numbers, but also string-like + // things, which requires more VM logic. This operation is + // evaluated in a generator frame. 
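+                    //
+                    // For example (illustrative only): `1 + 1` ends up in the
+                    // plain arithmetic branch of `add_values`, while
+                    // `./foo + "bar"` first has to coerce its right-hand side
+                    // to a string, which may itself force thunks and therefore
+                    // has to run as a generator.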
+ self.enqueue_generator("add_values", gen_span, |co| add_values(co, a, b)); + return Ok(false); + } + + OpCode::OpSub => { + let b = self.stack_pop(); + let a = self.stack_pop(); + let result = arithmetic_op!(&a, &b, -).with_span(&frame, self)?; + self.stack.push(result); + } + + OpCode::OpMul => { + let b = self.stack_pop(); + let a = self.stack_pop(); + let result = arithmetic_op!(&a, &b, *).with_span(&frame, self)?; + self.stack.push(result); + } + + OpCode::OpDiv => { + let b = self.stack_pop(); + + match b { + Value::Integer(0) => return frame.error(self, ErrorKind::DivisionByZero), + Value::Float(b) if b == 0.0_f64 => { + return frame.error(self, ErrorKind::DivisionByZero) + } + _ => {} + }; + + let a = self.stack_pop(); + let result = arithmetic_op!(&a, &b, /).with_span(&frame, self)?; + self.stack.push(result); + } + + OpCode::OpNegate => match self.stack_pop() { + Value::Integer(i) => self.stack.push(Value::Integer(-i)), + Value::Float(f) => self.stack.push(Value::Float(-f)), + v => { + return frame.error( + self, + ErrorKind::TypeError { + expected: "number (either int or float)", + actual: v.type_of(), + }, + ); + } + }, + + OpCode::OpLess => cmp_op!(self, frame, span, <), + OpCode::OpLessOrEq => cmp_op!(self, frame, span, <=), + OpCode::OpMore => cmp_op!(self, frame, span, >), + OpCode::OpMoreOrEq => cmp_op!(self, frame, span, >=), + + OpCode::OpFindFile => match self.stack_pop() { + Value::UnresolvedPath(path) => { + let resolved = self + .nix_search_path + .resolve(&mut *self.io_handle, *path) + .with_span(&frame, self)?; + self.stack.push(resolved.into()); + } + + _ => panic!("tvix compiler bug: OpFindFile called on non-UnresolvedPath"), + }, + + OpCode::OpResolveHomePath => match self.stack_pop() { + Value::UnresolvedPath(path) => { + match dirs::home_dir() { + None => { + return frame.error( + self, + ErrorKind::RelativePathResolution( + "failed to determine home directory".into(), + ), + ); + } + Some(mut buf) => { + buf.push(*path); + self.stack.push(buf.into()); + } + }; + } + + _ => { + panic!("tvix compiler bug: OpResolveHomePath called on non-UnresolvedPath") + } + }, + + OpCode::OpPushWith(StackIdx(idx)) => self.with_stack.push(frame.stack_offset + idx), + + OpCode::OpPopWith => { + self.with_stack.pop(); + } + + OpCode::OpAssertFail => { + frame.error(self, ErrorKind::AssertionFailed)?; + } + + // Data-carrying operands should never be executed, + // that is a critical error in the VM/compiler. + OpCode::DataStackIdx(_) + | OpCode::DataDeferredLocal(_) + | OpCode::DataUpvalueIdx(_) + | OpCode::DataCaptureWith => { + panic!("Tvix bug: attempted to execute data-carrying operand") + } + } + } + } +} + +/// Implementation of helper functions for the runtime logic above. +impl<'o> VM<'o> { + pub(crate) fn stack_pop(&mut self) -> Value { + self.stack.pop().expect("runtime stack empty") + } + + fn stack_peek(&self, offset: usize) -> &Value { + &self.stack[self.stack.len() - 1 - offset] + } + + fn run_attrset(&mut self, frame: &CallFrame, count: usize) -> EvalResult<()> { + let attrs = NixAttrs::construct(count, self.stack.split_off(self.stack.len() - count * 2)) + .with_span(frame, self)?; + + self.stack.push(Value::attrs(attrs)); + Ok(()) + } + + /// Access the last call frame present in the frame stack. + fn last_call_frame(&self) -> Option<&CallFrame> { + for frame in self.frames.iter().rev() { + if let Frame::CallFrame { call_frame, .. } = frame { + return Some(call_frame); + } + } + + None + } + + /// Push an already constructed warning. 
+ pub fn push_warning(&mut self, warning: EvalWarning) { + self.warnings.push(warning); + } + + /// Emit a warning with the given WarningKind and the source span + /// of the current instruction. + pub fn emit_warning(&mut self, _kind: WarningKind) { + // TODO: put LightSpan in warning, calculate only *after* eval + // TODO: what to do with the spans? + // self.push_warning(EvalWarning { + // kind, + // span: self.current_span(), + // }); + } + + /// Interpolate string fragments by popping the specified number of + /// fragments of the stack, evaluating them to strings, and pushing + /// the concatenated result string back on the stack. + fn run_interpolate(&mut self, frame: &CallFrame, count: usize) -> EvalResult<()> { + let mut out = String::new(); + + for _ in 0..count { + out.push_str(self.stack_pop().to_str().with_span(frame, self)?.as_str()); + } + + self.stack.push(Value::String(out.into())); + Ok(()) + } + + /// Returns a reasonable light span for the current situation that the VM is + /// in. + pub fn reasonable_light_span(&self) -> LightSpan { + self.reasonable_span.clone() + } + + /// Apply an argument from the stack to a builtin, and attempt to call it. + /// + /// All calls are tail-calls in Tvix, as every function application is a + /// separate thunk and OpCall is thus the last result in the thunk. + /// + /// Due to this, once control flow exits this function, the generator will + /// automatically be run by the VM. + fn call_builtin(&mut self, span: LightSpan, mut builtin: Builtin) -> EvalResult<()> { + let builtin_name = builtin.name(); + self.observer.observe_enter_builtin(builtin_name); + + builtin.apply_arg(self.stack_pop()); + + match builtin.call() { + // Partially applied builtin is just pushed back on the stack. + BuiltinResult::Partial(partial) => self.stack.push(Value::Builtin(partial)), + + // Builtin is fully applied and the generator needs to be run by the VM. + BuiltinResult::Called(name, generator) => self.frames.push(Frame::Generator { + generator, + span, + name, + state: GeneratorState::Running, + }), + } + + Ok(()) + } + + fn call_value( + &mut self, + span: LightSpan, + parent: Option<(LightSpan, CallFrame)>, + callable: Value, + ) -> EvalResult<()> { + match callable { + Value::Builtin(builtin) => self.call_builtin(span, builtin), + Value::Thunk(thunk) => self.call_value(span, parent, thunk.value().clone()), + + Value::Closure(closure) => { + let lambda = closure.lambda(); + self.observer.observe_tail_call(self.frames.len(), &lambda); + + // The stack offset is always `stack.len() - arg_count`, and + // since this branch handles native Nix functions (which always + // take only a single argument and are curried), the offset is + // `stack_len - 1`. + let stack_offset = self.stack.len() - 1; + + // Reenqueue the parent frame, which should only have + // `OpReturn` left. Not throwing it away leads to more + // useful error traces. + if let Some((parent_span, parent_frame)) = parent { + self.push_call_frame(parent_span, parent_frame); + } + + self.push_call_frame( + span, + CallFrame { + lambda, + upvalues: closure.upvalues(), + ip: CodeIdx(0), + stack_offset, + }, + ); + + Ok(()) + } + + // Attribute sets with a __functor attribute are callable. 
+ val @ Value::Attrs(_) => { + if let Some((parent_span, parent_frame)) = parent { + self.push_call_frame(parent_span, parent_frame); + } + + self.enqueue_generator("__functor call", span, |co| call_functor(co, val)); + Ok(()) + } + v => Err(ErrorKind::NotCallable(v.type_of())).with_span(&span, self), + } + } + + /// Populate the upvalue fields of a thunk or closure under construction. + fn populate_upvalues( + &mut self, + frame: &mut CallFrame, + count: usize, + mut upvalues: impl DerefMut<Target = Upvalues>, + ) -> EvalResult<()> { + for _ in 0..count { + match frame.inc_ip() { + OpCode::DataStackIdx(StackIdx(stack_idx)) => { + let idx = frame.stack_offset + stack_idx; + + let val = match self.stack.get(idx) { + Some(val) => val.clone(), + None => { + return frame.error( + self, + ErrorKind::TvixBug { + msg: "upvalue to be captured was missing on stack", + metadata: Some(Rc::new(json!({ + "ip": format!("{:#x}", frame.ip.0 - 1), + "stack_idx(relative)": stack_idx, + "stack_idx(absolute)": idx, + }))), + }, + ); + } + }; + + upvalues.deref_mut().push(val); + } + + OpCode::DataUpvalueIdx(upv_idx) => { + upvalues.deref_mut().push(frame.upvalue(upv_idx).clone()); + } + + OpCode::DataDeferredLocal(idx) => { + upvalues.deref_mut().push(Value::DeferredUpvalue(idx)); + } + + OpCode::DataCaptureWith => { + // Start the captured with_stack off of the + // current call frame's captured with_stack, ... + let mut captured_with_stack = frame + .upvalues + .with_stack() + .map(Clone::clone) + // ... or make an empty one if there isn't one already. + .unwrap_or_else(|| Vec::with_capacity(self.with_stack.len())); + + for idx in &self.with_stack { + captured_with_stack.push(self.stack[*idx].clone()); + } + + upvalues.deref_mut().set_with_stack(captured_with_stack); + } + + _ => panic!("compiler error: missing closure operand"), + } + } + + Ok(()) + } +} + +// TODO(amjoseph): de-asyncify this +/// Resolve a dynamically bound identifier (through `with`) by looking +/// for matching values in the with-stacks carried at runtime. +async fn resolve_with( + co: GenCo, + ident: String, + vm_with_len: usize, + upvalue_with_len: usize, +) -> Result<Value, ErrorKind> { + /// Fetch and force a value on the with-stack from the VM. + async fn fetch_forced_with(co: &GenCo, idx: usize) -> Value { + match co.yield_(VMRequest::WithValue(idx)).await { + VMResponse::Value(value) => value, + msg => panic!( + "Tvix bug: VM responded with incorrect generator message: {}", + msg + ), + } + } + + /// Fetch and force a value on the *captured* with-stack from the VM. + async fn fetch_captured_with(co: &GenCo, idx: usize) -> Value { + match co.yield_(VMRequest::CapturedWithValue(idx)).await { + VMResponse::Value(value) => value, + msg => panic!( + "Tvix bug: VM responded with incorrect generator message: {}", + msg + ), + } + } + + for with_stack_idx in (0..vm_with_len).rev() { + // TODO(tazjin): is this branch still live with the current with-thunking? 
+ let with = fetch_forced_with(&co, with_stack_idx).await; + + match with.to_attrs()?.select(&ident) { + None => continue, + Some(val) => return Ok(val.clone()), + } + } + + for upvalue_with_idx in (0..upvalue_with_len).rev() { + let with = fetch_captured_with(&co, upvalue_with_idx).await; + + match with.to_attrs()?.select(&ident) { + None => continue, + Some(val) => return Ok(val.clone()), + } + } + + Err(ErrorKind::UnknownDynamicVariable(ident)) +} + +// TODO(amjoseph): de-asyncify this +async fn add_values(co: GenCo, a: Value, b: Value) -> Result<Value, ErrorKind> { + let result = match (a, b) { + (Value::Path(p), v) => { + let mut path = p.to_string_lossy().into_owned(); + let vs = generators::request_string_coerce(&co, v, CoercionKind::Weak).await; + path.push_str(vs.as_str()); + crate::value::canon_path(PathBuf::from(path)).into() + } + (Value::String(s1), Value::String(s2)) => Value::String(s1.concat(&s2)), + (Value::String(s1), v) => Value::String( + s1.concat(&generators::request_string_coerce(&co, v, CoercionKind::Weak).await), + ), + (v, Value::String(s2)) => Value::String( + generators::request_string_coerce(&co, v, CoercionKind::Weak) + .await + .concat(&s2), + ), + (a, b) => arithmetic_op!(&a, &b, +)?, + }; + + Ok(result) +} + +/// The result of a VM's runtime evaluation. +pub struct RuntimeResult { + pub value: Value, + pub warnings: Vec<EvalWarning>, +} + +// TODO(amjoseph): de-asyncify this +/// Generator that retrieves the final value from the stack, and deep-forces it +/// before returning. +async fn final_deep_force(co: GenCo) -> Result<Value, ErrorKind> { + let value = generators::request_stack_pop(&co).await; + Ok(generators::request_deep_force(&co, value, SharedThunkSet::default()).await) +} + +pub fn run_lambda( + nix_search_path: NixSearchPath, + io_handle: Box<dyn EvalIO>, + observer: &mut dyn RuntimeObserver, + globals: Rc<GlobalsMap>, + lambda: Rc<Lambda>, + strict: bool, +) -> EvalResult<RuntimeResult> { + // Retain the top-level span of the expression in this lambda, as + // synthetic "calls" in deep_force will otherwise not have a span + // to fall back to. + // + // We exploit the fact that the compiler emits a final instruction + // with the span of the entire file for top-level expressions. + let root_span = lambda.chunk.get_span(CodeIdx(lambda.chunk.code.len() - 1)); + + let mut vm = VM::new( + nix_search_path, + io_handle, + observer, + globals, + root_span.into(), + ); + + // When evaluating strictly, synthesise a frame that will instruct + // the VM to deep-force the final value before returning it. + if strict { + vm.enqueue_generator("final_deep_force", root_span.into(), final_deep_force); + } + + vm.frames.push(Frame::CallFrame { + span: root_span.into(), + call_frame: CallFrame { + lambda, + upvalues: Rc::new(Upvalues::with_capacity(0)), + ip: CodeIdx(0), + stack_offset: 0, + }, + }); + + vm.execute() +} diff --git a/tvix/eval/src/warnings.rs b/tvix/eval/src/warnings.rs new file mode 100644 index 000000000000..aa567f302755 --- /dev/null +++ b/tvix/eval/src/warnings.rs @@ -0,0 +1,142 @@ +//! Implements warnings that are emitted in cases where code passed to +//! Tvix exhibits problems that the user could address. 
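+//!
+//! As a rough usage sketch (the names below all exist in this module, but the
+//! calling code is hypothetical), a warning pairs a [WarningKind] with the
+//! span of the offending code and can then be rendered against the
+//! [SourceCode]:
+//!
+//! ```notrust
+//! let warning = EvalWarning {
+//!     kind: WarningKind::DeadCode,
+//!     span: offending_span, // a codemap::Span pointing at the dead code
+//! };
+//! eprintln!("{}", warning.fancy_format_str(&source));
+//! ```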
+ +use codemap_diagnostic::{ColorConfig, Diagnostic, Emitter, Level, SpanLabel, SpanStyle}; + +use crate::SourceCode; + +#[derive(Debug)] +pub enum WarningKind { + DeprecatedLiteralURL, + UselessInherit, + UnusedBinding, + ShadowedGlobal(&'static str), + DeprecatedLegacyLet, + InvalidNixPath(String), + UselessBoolOperation(&'static str), + DeadCode, + EmptyInherit, + EmptyLet, + + /// Tvix internal warning for features triggered by users that are + /// not actually implemented yet, but do not cause runtime failures. + NotImplemented(&'static str), +} + +#[derive(Debug)] +pub struct EvalWarning { + pub kind: WarningKind, + pub span: codemap::Span, +} + +impl EvalWarning { + /// Render a fancy, human-readable output of this warning and + /// return it as a String. Note that this version of the output + /// does not include any colours or font styles. + pub fn fancy_format_str(&self, source: &SourceCode) -> String { + let mut out = vec![]; + Emitter::vec(&mut out, Some(&*source.codemap())).emit(&[self.diagnostic(source)]); + String::from_utf8_lossy(&out).to_string() + } + + /// Render a fancy, human-readable output of this warning and + /// print it to stderr. If rendered in a terminal that supports + /// colours and font styles, the output will include those. + pub fn fancy_format_stderr(&self, source: &SourceCode) { + Emitter::stderr(ColorConfig::Auto, Some(&*source.codemap())) + .emit(&[self.diagnostic(source)]); + } + + /// Create the optional span label displayed as an annotation on + /// the underlined span of the warning. + fn span_label(&self) -> Option<String> { + match self.kind { + WarningKind::UnusedBinding | WarningKind::ShadowedGlobal(_) => { + Some("variable declared here".into()) + } + _ => None, + } + } + + /// Create the primary warning message displayed to users for a + /// warning. + fn message(&self, source: &SourceCode) -> String { + match self.kind { + WarningKind::DeprecatedLiteralURL => { + "URL literal syntax is deprecated, use a quoted string instead".to_string() + } + + WarningKind::UselessInherit => { + "inherit does nothing (this variable already exists with the same value)" + .to_string() + } + + WarningKind::UnusedBinding => { + format!( + "variable '{}' is declared, but never used:", + source.source_slice(self.span) + ) + } + + WarningKind::ShadowedGlobal(name) => { + format!("declared variable '{}' shadows a built-in global!", name) + } + + WarningKind::DeprecatedLegacyLet => { + "legacy `let` syntax used, please rewrite this as `let .. in ...`".to_string() + } + + WarningKind::InvalidNixPath(ref err) => { + format!("invalid NIX_PATH resulted in a parse error: {}", err) + } + + WarningKind::UselessBoolOperation(msg) => { + format!("useless operation on boolean: {}", msg) + } + + WarningKind::DeadCode => "this code will never be executed".to_string(), + + WarningKind::EmptyInherit => "this `inherit` statement is empty".to_string(), + + WarningKind::EmptyLet => "this `let`-expression contains no bindings".to_string(), + + WarningKind::NotImplemented(what) => { + format!("feature not yet implemented in tvix: {}", what) + } + } + } + + /// Return the unique warning code for this variant which can be + /// used to refer users to documentation. 
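+    ///
+    /// For example (values taken from the match below):
+    ///
+    /// ```notrust
+    /// WarningKind::DeprecatedLiteralURL => "W001"
+    /// WarningKind::DeadCode             => "W008"
+    /// WarningKind::NotImplemented(_)    => "W999"
+    /// ```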
+ fn code(&self) -> &'static str { + match self.kind { + WarningKind::DeprecatedLiteralURL => "W001", + WarningKind::UselessInherit => "W002", + WarningKind::UnusedBinding => "W003", + WarningKind::ShadowedGlobal(_) => "W004", + WarningKind::DeprecatedLegacyLet => "W005", + WarningKind::InvalidNixPath(_) => "W006", + WarningKind::UselessBoolOperation(_) => "W007", + WarningKind::DeadCode => "W008", + WarningKind::EmptyInherit => "W009", + WarningKind::EmptyLet => "W010", + + WarningKind::NotImplemented(_) => "W999", + } + } + + fn diagnostic(&self, source: &SourceCode) -> Diagnostic { + let span_label = SpanLabel { + label: self.span_label(), + span: self.span, + style: SpanStyle::Primary, + }; + + Diagnostic { + level: Level::Warning, + message: self.message(source), + spans: vec![span_label], + code: Some(self.code().into()), + } + } +} diff --git a/tvix/eval/tests/nix_oracle.rs b/tvix/eval/tests/nix_oracle.rs new file mode 100644 index 000000000000..ecaf53a1b21a --- /dev/null +++ b/tvix/eval/tests/nix_oracle.rs @@ -0,0 +1,157 @@ +//! Tests which use upstream nix as an oracle to test evaluation against + +use std::{env, path::PathBuf, process::Command}; + +use pretty_assertions::assert_eq; +use tempdir::TempDir; + +fn nix_binary_path() -> PathBuf { + env::var("NIX_INSTANTIATE_BINARY_PATH") + .unwrap_or_else(|_| "nix-instantiate".to_owned()) + .into() +} + +#[derive(Clone, Copy)] +enum Strictness { + Lazy, + Strict, +} + +fn nix_eval(expr: &str, strictness: Strictness) -> String { + let store_dir = TempDir::new("store-dir").unwrap(); + + let mut args = match strictness { + Strictness::Lazy => vec![], + Strictness::Strict => vec!["--strict"], + }; + args.extend_from_slice(&["--eval", "-E"]); + + let output = Command::new(nix_binary_path()) + .args(&args[..]) + .arg(format!("({expr})")) + .env( + "NIX_REMOTE", + format!("local?root={}", store_dir.path().display()), + ) + .output() + .unwrap(); + if !output.status.success() { + panic!( + "nix eval {expr} failed!\n stdout: {}\n stderr: {}", + String::from_utf8_lossy(&output.stdout), + String::from_utf8_lossy(&output.stderr) + ) + } + + String::from_utf8(output.stdout).unwrap() +} + +/// Compare the evaluation of the given nix expression in nix (using the +/// `NIX_INSTANTIATE_BINARY_PATH` env var to resolve the `nix-instantiate` binary) and tvix, and +/// assert that the result is identical +#[track_caller] +fn compare_eval(expr: &str, strictness: Strictness) { + let nix_result = nix_eval(expr, strictness); + let mut eval = tvix_eval::Evaluation::new(expr, None); + eval.strict = matches!(strictness, Strictness::Strict); + eval.io_handle = Box::new(tvix_eval::StdIO); + + let tvix_result = eval + .evaluate() + .value + .expect("tvix evaluation should succeed") + .to_string(); + + assert_eq!(nix_result.trim(), tvix_result); +} + +/// Generate a suite of tests which call [`compare_eval`] on expressions, checking that nix and tvix +/// return identical results. +macro_rules! compare_eval_tests { + ($strictness:expr, {}) => {}; + ($strictness:expr, {$(#[$meta:meta])* $test_name: ident($expr: expr); $($rest:tt)*}) => { + #[test] + $(#[$meta])* + fn $test_name() { + compare_eval($expr, $strictness); + } + + compare_eval_tests!($strictness, { $($rest)* }); + } +} + +macro_rules! compare_strict_eval_tests { + ($($tests:tt)*) => { + compare_eval_tests!(Strictness::Lazy, { $($tests)* }); + } +} + +macro_rules! compare_lazy_eval_tests { + ($($tests:tt)*) => { + compare_eval_tests!(Strictness::Lazy, { $($tests)* }); + } +} + +compare_strict_eval_tests! 
{ + literal_int("1"); + add_ints("1 + 1"); + add_lists("[1 2] ++ [3 4]"); + add_paths(r#"[ + (./. + "/") + (./foo + "bar") + (let name = "bar"; in ./foo + name) + (let name = "bar"; in ./foo + "${name}") + (let name = "bar"; in ./foo + "/" + "${name}") + (let name = "bar"; in ./foo + "/${name}") + (./. + ./.) + ]"#); +} + +// TODO(sterni): tvix_tests should gain support for something similar in the future, +// but this requires messing with the path naming which would break compat with +// C++ Nix's test suite +compare_lazy_eval_tests! { + // Wrap every expression type supported by [Compiler::compile] in a list + // with lazy evaluation enabled, so we can check it being thunked or not + // against C++ Nix. + unthunked_literals_in_list("[ https://tvl.fyi 1 1.2 ]"); + unthunked_path_in_list("[ ./nix_oracle.rs ]"); + unthunked_string_literal_in_list("[ \":thonking:\" ]"); + thunked_unary_ops_in_list("[ (!true) (-1) ]"); + thunked_bin_ops_in_list(r#" + let + # Necessary to fool the optimiser for && and || + true' = true; + false' = false; + in + [ + (true' && false') + (true' || false') + (false -> true) + (40 + 2) + (43 - 1) + (21 * 2) + (126 / 3) + ({ } // { bar = null; }) + (12 == 13) + (3 < 2) + (4 > 2) + (23 >= 42) + (33 <= 22) + ([ ] ++ [ ]) + (42 != null) + ] + "#); + thunked_has_attrs_in_list("[ ({ } ? foo) ]"); + thunked_list_in_list("[ [ 1 2 3 ] ]"); + thunked_attr_set_in_list("[ { foo = null; } ]"); + thunked_select_in_list("[ ({ foo = null; }.bar) ]"); + thunked_assert_in_list("[ (assert false; 12) ]"); + thunked_if_in_list("[ (if false then 13 else 12) ]"); + thunked_let_in_list("[ (let foo = 12; in foo) ]"); + thunked_with_in_list("[ (with { foo = 13; }; fooo) ]"); + unthunked_identifier_in_list("let foo = 12; in [ foo ]"); + thunked_lambda_in_list("[ (x: x) ]"); + thunked_function_application_in_list("[ (builtins.add 1 2) ]"); + thunked_legacy_let_in_list("[ (let { foo = 12; body = foo; }) ]"); +} diff --git a/tvix/nix-compat/Cargo.toml b/tvix/nix-compat/Cargo.toml new file mode 100644 index 000000000000..49ddbf4728e9 --- /dev/null +++ b/tvix/nix-compat/Cargo.toml @@ -0,0 +1,25 @@ +[package] +name = "nix-compat" +version = "0.1.0" +edition = "2021" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +anyhow = "1.0.68" +data-encoding = "2.3.3" +glob = "0.3.0" +serde = { version = "1.0", features = ["derive"] } +serde_json = "1.0" +sha2 = "0.10.6" +thiserror = "1.0.38" + +[dev-dependencies] +serde_json = "1.0" +test-case = "2.2.2" + +[dev-dependencies.test-generator] +# This fork of test-generator adds support for cargo workspaces, see +# also https://github.com/frehberg/test-generator/pull/14 +git = "https://github.com/JamesGuthrie/test-generator.git" +rev = "82e799979980962aec1aa324ec6e0e4cad781f41" diff --git a/tvix/nix-compat/default.nix b/tvix/nix-compat/default.nix new file mode 100644 index 000000000000..7f51438eb70d --- /dev/null +++ b/tvix/nix-compat/default.nix @@ -0,0 +1,5 @@ +{ depot, ... }: + +depot.tvix.crates.workspaceMembers.nix-compat.build.override { + runTests = true; +} diff --git a/tvix/nix-compat/src/derivation/errors.rs b/tvix/nix-compat/src/derivation/errors.rs new file mode 100644 index 000000000000..8e9e6a121096 --- /dev/null +++ b/tvix/nix-compat/src/derivation/errors.rs @@ -0,0 +1,59 @@ +use crate::{nixbase32::Nixbase32DecodeError, store_path}; +use thiserror::Error; + +/// Errors that can occur during the validation of Derivation structs. 
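+///
+/// Since this type derives [thiserror::Error], each variant's `#[error(...)]`
+/// attribute doubles as its display message; for example (illustrative only):
+///
+/// ```notrust
+/// DerivationError::InvalidOutputName("foo bar".to_string()).to_string()
+/// // => "invalid output name: foo bar"
+/// ```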
+#[derive(Debug, Error, PartialEq)]
+pub enum DerivationError {
+    // outputs
+    #[error("no outputs defined")]
+    NoOutputs(),
+    #[error("invalid output name: {0}")]
+    InvalidOutputName(String),
+    #[error("encountered fixed-output derivation, but more than 1 output in total")]
+    MoreThanOneOutputButFixed(),
+    #[error("invalid output name for fixed-output derivation: {0}")]
+    InvalidOutputNameForFixed(String),
+    #[error("unable to validate output {0}: {1}")]
+    InvalidOutput(String, OutputError),
+    #[error("unable to validate output {0}: {1}")]
+    InvalidOutputDerivationPath(String, store_path::BuildStorePathError),
+    // input derivation
+    #[error("unable to parse input derivation path {0}: {1}")]
+    InvalidInputDerivationPath(String, store_path::Error),
+    #[error("input derivation {0} doesn't end with .drv")]
+    InvalidInputDerivationPrefix(String),
+    #[error("input derivation {0} output names are empty")]
+    EmptyInputDerivationOutputNames(String),
+    #[error("input derivation {0} output name {1} is invalid")]
+    InvalidInputDerivationOutputName(String, String),
+
+    // input sources
+    #[error("unable to parse input sources path {0}: {1}")]
+    InvalidInputSourcesPath(String, store_path::Error),
+
+    // platform
+    #[error("invalid platform field: {0}")]
+    InvalidPlatform(String),
+
+    // builder
+    #[error("invalid builder field: {0}")]
+    InvalidBuilder(String),
+
+    // environment
+    #[error("invalid environment key {0}")]
+    InvalidEnvironmentKey(String),
+}
+
+/// Errors that can occur during the validation of a specific
+/// [crate::derivation::Output] of a [crate::derivation::Derivation].
+#[derive(Debug, Error, PartialEq)]
+pub enum OutputError {
+    #[error("Invalid output path {0}: {1}")]
+    InvalidOutputPath(String, store_path::Error),
+    #[error("Invalid hash encoding: {0}")]
+    InvalidHashEncoding(String, Nixbase32DecodeError),
+    #[error("Invalid hash algo: {0}")]
+    InvalidHashAlgo(String),
+    #[error("Invalid Digest size {0} for algo {1}")]
+    InvalidDigestSizeForAlgo(usize, String),
+}
diff --git a/tvix/nix-compat/src/derivation/mod.rs b/tvix/nix-compat/src/derivation/mod.rs
new file mode 100644
index 000000000000..ab1471165549
--- /dev/null
+++ b/tvix/nix-compat/src/derivation/mod.rs
@@ -0,0 +1,240 @@
+use crate::store_path::{
+    self, build_output_path, build_regular_ca_path, build_text_path, StorePath,
+};
+use serde::{Deserialize, Serialize};
+use sha2::{Digest, Sha256};
+use std::collections::{BTreeMap, BTreeSet};
+
+mod errors;
+mod output;
+mod string_escape;
+mod validate;
+mod write;
+
+#[cfg(test)]
+mod tests;
+
+// Public API of the crate.
+pub use crate::nixhash::{NixHash, NixHashWithMode};
+pub use errors::{DerivationError, OutputError};
+pub use output::Output;
+
+#[derive(Clone, Debug, Default, Eq, PartialEq, Serialize, Deserialize)]
+pub struct Derivation {
+    #[serde(rename = "args")]
+    pub arguments: Vec<String>,
+
+    pub builder: String,
+
+    #[serde(rename = "env")]
+    pub environment: BTreeMap<String, String>,
+
+    #[serde(rename = "inputDrvs")]
+    pub input_derivations: BTreeMap<String, BTreeSet<String>>,
+
+    #[serde(rename = "inputSrcs")]
+    pub input_sources: BTreeSet<String>,
+
+    pub outputs: BTreeMap<String, Output>,
+
+    pub system: String,
+}
+
+impl Derivation {
+    /// write the Derivation to the given [std::fmt::Write], in ATerm format.
+    ///
+    /// The only errors returned are those that occur while writing to the passed writer.
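+    ///
+    /// A minimal usage sketch (assuming `drv` is a fully populated
+    /// [Derivation]):
+    ///
+    /// ```notrust
+    /// let mut aterm = String::new();
+    /// drv.serialize(&mut aterm)?;
+    /// // `aterm` now holds the ATerm form, e.g. starting with "Derive([...".
+    /// ```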
+    pub fn serialize(&self, writer: &mut impl std::fmt::Write) -> Result<(), std::fmt::Error> {
+        writer.write_str(write::DERIVATION_PREFIX)?;
+        writer.write_char(write::PAREN_OPEN)?;
+
+        write::write_outputs(writer, &self.outputs)?;
+        write::write_input_derivations(writer, &self.input_derivations)?;
+        write::write_input_sources(writer, &self.input_sources)?;
+        write::write_system(writer, &self.system)?;
+        write::write_builder(writer, &self.builder)?;
+        write::write_arguments(writer, &self.arguments)?;
+        write::write_enviroment(writer, &self.environment)?;
+
+        writer.write_char(write::PAREN_CLOSE)?;
+
+        Ok(())
+    }
+
+    /// return the ATerm serialization as a string.
+    pub fn to_aterm_string(&self) -> String {
+        let mut buffer = String::new();
+
+        // invoke serialize and write to the buffer.
+        // serialize only returns errors from writing to the writer, and
+        // writing to a String cannot fail, so the unwrap is safe here.
+        self.serialize(&mut buffer).unwrap();
+
+        buffer
+    }
+
+    /// Returns the drv path of a [Derivation] struct.
+    ///
+    /// The drv path is calculated by invoking [build_text_path], using
+    /// the `name` with a `.drv` suffix as name, all [Derivation::input_sources] and
+    /// keys of [Derivation::input_derivations] as references, and the ATerm string of
+    /// the [Derivation] as content.
+    pub fn calculate_derivation_path(&self, name: &str) -> Result<StorePath, DerivationError> {
+        // append .drv to the name
+        let name = &format!("{}.drv", name);
+
+        // collect the list of paths from input_sources and input_derivations
+        // into a (sorted, guaranteed by BTreeSet) list of references
+        let references: BTreeSet<String> = {
+            let mut inputs = self.input_sources.clone();
+            let input_derivation_keys: Vec<String> =
+                self.input_derivations.keys().cloned().collect();
+            inputs.extend(input_derivation_keys);
+            inputs
+        };
+
+        build_text_path(name, self.to_aterm_string(), references)
+            .map_err(|_e| DerivationError::InvalidOutputName(name.to_string()))
+    }
+
+    /// Returns the FOD digest, if the derivation is fixed-output, or None if
+    /// it's not.
+    fn fod_digest(&self) -> Option<Vec<u8>> {
+        if self.outputs.len() != 1 {
+            return None;
+        }
+
+        let out_output = self.outputs.get("out")?;
+        Some(
+            Sha256::new_with_prefix(format!(
+                "fixed:out:{}:{}",
+                out_output.hash_with_mode.clone()?.to_nix_hash_string(),
+                out_output.path
+            ))
+            .finalize()
+            .to_vec(),
+        )
+    }
+
+    /// Calculates the hash of a derivation modulo fixed-output subderivations.
+    ///
+    /// This is called `hashDerivationModulo` in nixcpp.
+    ///
+    /// It returns a [NixHash], created by calculating the sha256 digest of
+    /// the derivation ATerm representation, except that:
+    /// - any input derivation paths have been replaced "by the result of a
+    ///   recursive call to this function" and that
+    /// - for fixed-output derivations the special
+    ///   `fixed:out:${algo}:${digest}:${fodPath}` string is hashed instead of
+    ///   the A-Term.
+    ///
+    /// If the derivation is not a fixed-output derivation, it's up to the
+    /// caller of this function to provide a lookup function to look up these
+    /// calculation results of parent derivations via
+    /// `fn_get_derivation_or_fod_hash` (by drv path).
+    pub fn derivation_or_fod_hash<F>(&self, fn_get_derivation_or_fod_hash: F) -> NixHash
+    where
+        F: Fn(&str) -> NixHash,
+    {
+        // Fixed-output derivations return a fixed hash.
+        // Non-fixed-output derivations return a hash of the ATerm notation, but with all
+        // input_derivation paths replaced by a recursive call to this function.
+        // We use fn_get_derivation_or_fod_hash here, so callers can precompute this.
+        let digest = self.fod_digest().unwrap_or_else(|| {
+            // This is a new map from the lowerhex-encoded derivation_or_fod_hash
+            // digest to the list of output names.
+            let mut replaced_input_derivations: BTreeMap<String, BTreeSet<String>> =
+                BTreeMap::new();
+
+            // For each input_derivation, look up the
+            // derivation_or_fod_hash, and replace the derivation path with its HEXLOWER
+            // digest.
+            // (This is not [NixHash::to_nix_hash_string], which would include the
+            // sha256: prefix.)
+            for (drv_path, output_names) in &self.input_derivations {
+                replaced_input_derivations.insert(
+                    data_encoding::HEXLOWER.encode(&fn_get_derivation_or_fod_hash(drv_path).digest),
+                    output_names.clone(),
+                );
+            }
+
+            // construct a new derivation struct with these replaced input derivation strings
+            let replaced_derivation = Derivation {
+                input_derivations: replaced_input_derivations,
+                ..self.clone()
+            };
+
+            // write the ATerm of that to the hash function
+            let mut hasher = Sha256::new();
+            hasher.update(replaced_derivation.to_aterm_string());
+
+            hasher.finalize().to_vec()
+        });
+        NixHash::new(crate::nixhash::HashAlgo::Sha256, digest.to_vec())
+    }
+
+    /// This calculates all output paths of a Derivation and updates the struct.
+    /// It requires the struct to be initially without output paths.
+    /// This means self.outputs[$outputName].path needs to be an empty string,
+    /// and self.environment[$outputName] needs to be an empty string.
+    ///
+    /// Output path calculation requires knowledge of the
+    /// derivation_or_fod_hash [NixHash], which (in case of non-fixed-output
+    /// derivations) also requires knowledge of other hash_derivation_modulo
+    /// [NixHash]es.
+    ///
+    /// We solve this by asking the caller of this function to provide the
+    /// hash_derivation_modulo of the current Derivation.
+    ///
+    /// On completion, self.environment[$outputName] and
+    /// self.outputs[$outputName].path are set to the calculated output path for all
+    /// outputs.
+    pub fn calculate_output_paths(
+        &mut self,
+        name: &str,
+        derivation_or_fod_hash: &NixHash,
+    ) -> Result<(), DerivationError> {
+        // The fingerprint and hash differ per output
+        for (output_name, output) in self.outputs.iter_mut() {
+            // Assert that outputs are not yet populated, to avoid using this function wrongly.
+            // We don't also check self.environment, but this is a sufficient
+            // footgun-prevention mechanism.
+            assert!(output.path.is_empty());
+
+            let path_name = output_path_name(name, output_name);
+
+            // For fixed-output derivations we use the per-output info; otherwise we use the
+            // derivation hash.
+            let abs_store_path = if let Some(ref hwm) = output.hash_with_mode {
+                build_regular_ca_path(&path_name, hwm, Vec::<String>::new(), false).map_err(
+                    |e| DerivationError::InvalidOutputDerivationPath(output_name.to_string(), e),
+                )?
+            } else {
+                build_output_path(derivation_or_fod_hash, output_name, &path_name).map_err(|e| {
+                    DerivationError::InvalidOutputDerivationPath(
+                        output_name.to_string(),
+                        store_path::BuildStorePathError::InvalidName(e),
+                    )
+                })?
+            };
+
+            output.path = abs_store_path.to_absolute_path();
+            self.environment
+                .insert(output_name.to_string(), abs_store_path.to_absolute_path());
+        }
+
+        Ok(())
+    }
+}
+
+/// Calculate the name part of the store path of a derivation [Output].
+///
+/// It's the derivation name and, for outputs other than `out`, the output
+/// name appended after a `-`.
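+///
+/// For example (per the logic below):
+///
+/// ```notrust
+/// output_path_name("hello", "out") == "hello"
+/// output_path_name("hello", "lib") == "hello-lib"
+/// ```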
+fn output_path_name(derivation_name: &str, output_name: &str) -> String {
+    let mut output_path_name = derivation_name.to_string();
+    if output_name != "out" {
+        output_path_name.push('-');
+        output_path_name.push_str(output_name);
+    }
+    output_path_name
+}
diff --git a/tvix/nix-compat/src/derivation/output.rs b/tvix/nix-compat/src/derivation/output.rs
new file mode 100644
index 000000000000..4bfc7bf8014d
--- /dev/null
+++ b/tvix/nix-compat/src/derivation/output.rs
@@ -0,0 +1,36 @@
+use crate::derivation::OutputError;
+use crate::nixhash::{HashAlgo, NixHashWithMode};
+use crate::store_path::StorePath;
+use serde::{Deserialize, Serialize};
+
+#[derive(Clone, Debug, Default, Eq, PartialEq, Serialize, Deserialize)]
+pub struct Output {
+    pub path: String,
+
+    #[serde(flatten)]
+    pub hash_with_mode: Option<NixHashWithMode>,
+}
+
+impl Output {
+    pub fn is_fixed(&self) -> bool {
+        self.hash_with_mode.is_some()
+    }
+
+    pub fn validate(&self, validate_output_paths: bool) -> Result<(), OutputError> {
+        if let Some(hash) = &self.hash_with_mode {
+            match hash {
+                NixHashWithMode::Flat(h) | NixHashWithMode::Recursive(h) => {
+                    // Only sha1 and sha256 are accepted hash algorithms here.
+                    if h.algo != HashAlgo::Sha1 && h.algo != HashAlgo::Sha256 {
+                        return Err(OutputError::InvalidHashAlgo(h.algo.to_string()));
+                    }
+                }
+            }
+        }
+        if validate_output_paths {
+            if let Err(e) = StorePath::from_absolute_path(&self.path) {
+                return Err(OutputError::InvalidOutputPath(self.path.to_string(), e));
+            }
+        }
+        Ok(())
+    }
+}
diff --git a/tvix/nix-compat/src/derivation/string_escape.rs b/tvix/nix-compat/src/derivation/string_escape.rs
new file mode 100644
index 000000000000..0e1dbe516f73
--- /dev/null
+++ b/tvix/nix-compat/src/derivation/string_escape.rs
@@ -0,0 +1,17 @@
+const STRING_ESCAPER: [(char, &str); 5] = [
+    ('\\', "\\\\"),
+    ('\n', "\\n"),
+    ('\r', "\\r"),
+    ('\t', "\\t"),
+    ('\"', "\\\""),
+];
+
+pub fn escape_string(s: &str) -> String {
+    let mut s_replaced = s.to_string();
+
+    for escape_sequence in STRING_ESCAPER {
+        s_replaced = s_replaced.replace(escape_sequence.0, escape_sequence.1);
+    }
+
+    format!("\"{}\"", s_replaced)
+}
diff --git a/tvix/nix-compat/src/derivation/tests/derivation_tests/0hm2f1psjpcwg8fijsmr4wwxrx59s092-bar.drv b/tvix/nix-compat/src/derivation/tests/derivation_tests/0hm2f1psjpcwg8fijsmr4wwxrx59s092-bar.drv
new file mode 100644
index 000000000000..a4fea3c5f486
--- /dev/null
+++ b/tvix/nix-compat/src/derivation/tests/derivation_tests/0hm2f1psjpcwg8fijsmr4wwxrx59s092-bar.drv
@@ -0,0 +1 @@
+Derive([("out","/nix/store/4q0pg5zpfmznxscq3avycvf9xdvx50n3-bar","r:sha256","08813cbee9903c62be4c5027726a418a300da4500b2d369d3af9286f4815ceba")],[],[],":",":",[],[("builder",":"),("name","bar"),("out","/nix/store/4q0pg5zpfmznxscq3avycvf9xdvx50n3-bar"),("outputHash","08813cbee9903c62be4c5027726a418a300da4500b2d369d3af9286f4815ceba"),("outputHashAlgo","sha256"),("outputHashMode","recursive"),("system",":")])
\ No newline at end of file
diff --git a/tvix/nix-compat/src/derivation/tests/derivation_tests/0hm2f1psjpcwg8fijsmr4wwxrx59s092-bar.drv.json b/tvix/nix-compat/src/derivation/tests/derivation_tests/0hm2f1psjpcwg8fijsmr4wwxrx59s092-bar.drv.json
new file mode 100644
index 000000000000..c8bbc4cbb5be
--- /dev/null
+++ b/tvix/nix-compat/src/derivation/tests/derivation_tests/0hm2f1psjpcwg8fijsmr4wwxrx59s092-bar.drv.json
@@ -0,0 +1,23 @@
+{
+  "args": [],
+  "builder": ":",
+  "env": {
+    "builder": ":",
+    "name": "bar",
+    "out": "/nix/store/4q0pg5zpfmznxscq3avycvf9xdvx50n3-bar",
+    "outputHash": "08813cbee9903c62be4c5027726a418a300da4500b2d369d3af9286f4815ceba",
+    
"outputHashAlgo": "sha256", + "outputHashMode": "recursive", + "system": ":" + }, + "inputDrvs": {}, + "inputSrcs": [], + "outputs": { + "out": { + "hash": "08813cbee9903c62be4c5027726a418a300da4500b2d369d3af9286f4815ceba", + "hashAlgo": "r:sha256", + "path": "/nix/store/4q0pg5zpfmznxscq3avycvf9xdvx50n3-bar" + } + }, + "system": ":" +} diff --git a/tvix/nix-compat/src/derivation/tests/derivation_tests/292w8yzv5nn7nhdpxcs8b7vby2p27s09-nested-json.drv b/tvix/nix-compat/src/derivation/tests/derivation_tests/292w8yzv5nn7nhdpxcs8b7vby2p27s09-nested-json.drv new file mode 100644 index 000000000000..f0d9230a5a52 --- /dev/null +++ b/tvix/nix-compat/src/derivation/tests/derivation_tests/292w8yzv5nn7nhdpxcs8b7vby2p27s09-nested-json.drv @@ -0,0 +1 @@ +Derive([("out","/nix/store/pzr7lsd3q9pqsnb42r9b23jc5sh8irvn-nested-json","","")],[],[],":",":",[],[("builder",":"),("json","{\"hello\":\"moto\\n\"}"),("name","nested-json"),("out","/nix/store/pzr7lsd3q9pqsnb42r9b23jc5sh8irvn-nested-json"),("system",":")]) \ No newline at end of file diff --git a/tvix/nix-compat/src/derivation/tests/derivation_tests/292w8yzv5nn7nhdpxcs8b7vby2p27s09-nested-json.drv.json b/tvix/nix-compat/src/derivation/tests/derivation_tests/292w8yzv5nn7nhdpxcs8b7vby2p27s09-nested-json.drv.json new file mode 100644 index 000000000000..9cb0b43b4c09 --- /dev/null +++ b/tvix/nix-compat/src/derivation/tests/derivation_tests/292w8yzv5nn7nhdpxcs8b7vby2p27s09-nested-json.drv.json @@ -0,0 +1,19 @@ +{ + "args": [], + "builder": ":", + "env": { + "builder": ":", + "json": "{\"hello\":\"moto\\n\"}", + "name": "nested-json", + "out": "/nix/store/pzr7lsd3q9pqsnb42r9b23jc5sh8irvn-nested-json", + "system": ":" + }, + "inputDrvs": {}, + "inputSrcs": [], + "outputs": { + "out": { + "path": "/nix/store/pzr7lsd3q9pqsnb42r9b23jc5sh8irvn-nested-json" + } + }, + "system": ":" +} diff --git a/tvix/nix-compat/src/derivation/tests/derivation_tests/4wvvbi4jwn0prsdxb7vs673qa5h9gr7x-foo.drv b/tvix/nix-compat/src/derivation/tests/derivation_tests/4wvvbi4jwn0prsdxb7vs673qa5h9gr7x-foo.drv new file mode 100644 index 000000000000..a2cf9d31f92e --- /dev/null +++ b/tvix/nix-compat/src/derivation/tests/derivation_tests/4wvvbi4jwn0prsdxb7vs673qa5h9gr7x-foo.drv @@ -0,0 +1 @@ +Derive([("out","/nix/store/5vyvcwah9l9kf07d52rcgdk70g2f4y13-foo","","")],[("/nix/store/0hm2f1psjpcwg8fijsmr4wwxrx59s092-bar.drv",["out"])],[],":",":",[],[("bar","/nix/store/4q0pg5zpfmznxscq3avycvf9xdvx50n3-bar"),("builder",":"),("name","foo"),("out","/nix/store/5vyvcwah9l9kf07d52rcgdk70g2f4y13-foo"),("system",":")]) \ No newline at end of file diff --git a/tvix/nix-compat/src/derivation/tests/derivation_tests/4wvvbi4jwn0prsdxb7vs673qa5h9gr7x-foo.drv.json b/tvix/nix-compat/src/derivation/tests/derivation_tests/4wvvbi4jwn0prsdxb7vs673qa5h9gr7x-foo.drv.json new file mode 100644 index 000000000000..957a85ccab82 --- /dev/null +++ b/tvix/nix-compat/src/derivation/tests/derivation_tests/4wvvbi4jwn0prsdxb7vs673qa5h9gr7x-foo.drv.json @@ -0,0 +1,23 @@ +{ + "args": [], + "builder": ":", + "env": { + "bar": "/nix/store/4q0pg5zpfmznxscq3avycvf9xdvx50n3-bar", + "builder": ":", + "name": "foo", + "out": "/nix/store/5vyvcwah9l9kf07d52rcgdk70g2f4y13-foo", + "system": ":" + }, + "inputDrvs": { + "/nix/store/0hm2f1psjpcwg8fijsmr4wwxrx59s092-bar.drv": [ + "out" + ] + }, + "inputSrcs": [], + "outputs": { + "out": { + "path": "/nix/store/5vyvcwah9l9kf07d52rcgdk70g2f4y13-foo" + } + }, + "system": ":" +} diff --git a/tvix/nix-compat/src/derivation/tests/derivation_tests/52a9id8hx688hvlnz4d1n25ml1jdykz0-unicode.drv 
b/tvix/nix-compat/src/derivation/tests/derivation_tests/52a9id8hx688hvlnz4d1n25ml1jdykz0-unicode.drv new file mode 100644 index 000000000000..bbe88c02c739 --- /dev/null +++ b/tvix/nix-compat/src/derivation/tests/derivation_tests/52a9id8hx688hvlnz4d1n25ml1jdykz0-unicode.drv @@ -0,0 +1 @@ +Derive([("out","/nix/store/vgvdj6nf7s8kvfbl2skbpwz9kc7xjazc-unicode","","")],[],[],":",":",[],[("builder",":"),("letters","räksmörgås\nrødgrød med fløde\nLübeck\n肥猪\nこんにちは / 今日は\n🌮\n"),("name","unicode"),("out","/nix/store/vgvdj6nf7s8kvfbl2skbpwz9kc7xjazc-unicode"),("system",":")]) \ No newline at end of file diff --git a/tvix/nix-compat/src/derivation/tests/derivation_tests/52a9id8hx688hvlnz4d1n25ml1jdykz0-unicode.drv.json b/tvix/nix-compat/src/derivation/tests/derivation_tests/52a9id8hx688hvlnz4d1n25ml1jdykz0-unicode.drv.json new file mode 100644 index 000000000000..f8f33c1bba17 --- /dev/null +++ b/tvix/nix-compat/src/derivation/tests/derivation_tests/52a9id8hx688hvlnz4d1n25ml1jdykz0-unicode.drv.json @@ -0,0 +1,19 @@ +{ + "outputs": { + "out": { + "path": "/nix/store/vgvdj6nf7s8kvfbl2skbpwz9kc7xjazc-unicode" + } + }, + "inputSrcs": [], + "inputDrvs": {}, + "system": ":", + "builder": ":", + "args": [], + "env": { + "builder": ":", + "letters": "räksmörgås\nrødgrød med fløde\nLübeck\n肥猪\nこんにちは / 今日は\n🌮\n", + "name": "unicode", + "out": "/nix/store/vgvdj6nf7s8kvfbl2skbpwz9kc7xjazc-unicode", + "system": ":" + } +} diff --git a/tvix/nix-compat/src/derivation/tests/derivation_tests/9lj1lkjm2ag622mh4h9rpy6j607an8g2-structured-attrs.drv b/tvix/nix-compat/src/derivation/tests/derivation_tests/9lj1lkjm2ag622mh4h9rpy6j607an8g2-structured-attrs.drv new file mode 100644 index 000000000000..4b9338c0b953 --- /dev/null +++ b/tvix/nix-compat/src/derivation/tests/derivation_tests/9lj1lkjm2ag622mh4h9rpy6j607an8g2-structured-attrs.drv @@ -0,0 +1 @@ +Derive([("out","/nix/store/6a39dl014j57bqka7qx25k0vb20vkqm6-structured-attrs","","")],[],[],":",":",[],[("__json","{\"builder\":\":\",\"name\":\"structured-attrs\",\"system\":\":\"}"),("out","/nix/store/6a39dl014j57bqka7qx25k0vb20vkqm6-structured-attrs")]) \ No newline at end of file diff --git a/tvix/nix-compat/src/derivation/tests/derivation_tests/9lj1lkjm2ag622mh4h9rpy6j607an8g2-structured-attrs.drv.json b/tvix/nix-compat/src/derivation/tests/derivation_tests/9lj1lkjm2ag622mh4h9rpy6j607an8g2-structured-attrs.drv.json new file mode 100644 index 000000000000..74e3d7df55c5 --- /dev/null +++ b/tvix/nix-compat/src/derivation/tests/derivation_tests/9lj1lkjm2ag622mh4h9rpy6j607an8g2-structured-attrs.drv.json @@ -0,0 +1,16 @@ +{ + "args": [], + "builder": ":", + "env": { + "__json": "{\"builder\":\":\",\"name\":\"structured-attrs\",\"system\":\":\"}", + "out": "/nix/store/6a39dl014j57bqka7qx25k0vb20vkqm6-structured-attrs" + }, + "inputDrvs": {}, + "inputSrcs": [], + "outputs": { + "out": { + "path": "/nix/store/6a39dl014j57bqka7qx25k0vb20vkqm6-structured-attrs" + } + }, + "system": ":" +} diff --git a/tvix/nix-compat/src/derivation/tests/derivation_tests/ch49594n9avinrf8ip0aslidkc4lxkqv-foo.drv b/tvix/nix-compat/src/derivation/tests/derivation_tests/ch49594n9avinrf8ip0aslidkc4lxkqv-foo.drv new file mode 100644 index 000000000000..1699c2a75e48 --- /dev/null +++ b/tvix/nix-compat/src/derivation/tests/derivation_tests/ch49594n9avinrf8ip0aslidkc4lxkqv-foo.drv @@ -0,0 +1 @@ 
+Derive([("out","/nix/store/fhaj6gmwns62s6ypkcldbaj2ybvkhx3p-foo","","")],[("/nix/store/ss2p4wmxijn652haqyd7dckxwl4c7hxx-bar.drv",["out"])],[],":",":",[],[("bar","/nix/store/mp57d33657rf34lzvlbpfa1gjfv5gmpg-bar"),("builder",":"),("name","foo"),("out","/nix/store/fhaj6gmwns62s6ypkcldbaj2ybvkhx3p-foo"),("system",":")]) \ No newline at end of file diff --git a/tvix/nix-compat/src/derivation/tests/derivation_tests/ch49594n9avinrf8ip0aslidkc4lxkqv-foo.drv.json b/tvix/nix-compat/src/derivation/tests/derivation_tests/ch49594n9avinrf8ip0aslidkc4lxkqv-foo.drv.json new file mode 100644 index 000000000000..831d27956d86 --- /dev/null +++ b/tvix/nix-compat/src/derivation/tests/derivation_tests/ch49594n9avinrf8ip0aslidkc4lxkqv-foo.drv.json @@ -0,0 +1,23 @@ +{ + "args": [], + "builder": ":", + "env": { + "bar": "/nix/store/mp57d33657rf34lzvlbpfa1gjfv5gmpg-bar", + "builder": ":", + "name": "foo", + "out": "/nix/store/fhaj6gmwns62s6ypkcldbaj2ybvkhx3p-foo", + "system": ":" + }, + "inputDrvs": { + "/nix/store/ss2p4wmxijn652haqyd7dckxwl4c7hxx-bar.drv": [ + "out" + ] + }, + "inputSrcs": [], + "outputs": { + "out": { + "path": "/nix/store/fhaj6gmwns62s6ypkcldbaj2ybvkhx3p-foo" + } + }, + "system": ":" +} diff --git a/tvix/nix-compat/src/derivation/tests/derivation_tests/h32dahq0bx5rp1krcdx3a53asj21jvhk-has-multi-out.drv b/tvix/nix-compat/src/derivation/tests/derivation_tests/h32dahq0bx5rp1krcdx3a53asj21jvhk-has-multi-out.drv new file mode 100644 index 000000000000..523612238c76 --- /dev/null +++ b/tvix/nix-compat/src/derivation/tests/derivation_tests/h32dahq0bx5rp1krcdx3a53asj21jvhk-has-multi-out.drv @@ -0,0 +1 @@ +Derive([("lib","/nix/store/2vixb94v0hy2xc6p7mbnxxcyc095yyia-has-multi-out-lib","",""),("out","/nix/store/55lwldka5nyxa08wnvlizyqw02ihy8ic-has-multi-out","","")],[],[],":",":",[],[("builder",":"),("lib","/nix/store/2vixb94v0hy2xc6p7mbnxxcyc095yyia-has-multi-out-lib"),("name","has-multi-out"),("out","/nix/store/55lwldka5nyxa08wnvlizyqw02ihy8ic-has-multi-out"),("outputs","out lib"),("system",":")]) \ No newline at end of file diff --git a/tvix/nix-compat/src/derivation/tests/derivation_tests/h32dahq0bx5rp1krcdx3a53asj21jvhk-has-multi-out.drv.json b/tvix/nix-compat/src/derivation/tests/derivation_tests/h32dahq0bx5rp1krcdx3a53asj21jvhk-has-multi-out.drv.json new file mode 100644 index 000000000000..0bd7a2991cc7 --- /dev/null +++ b/tvix/nix-compat/src/derivation/tests/derivation_tests/h32dahq0bx5rp1krcdx3a53asj21jvhk-has-multi-out.drv.json @@ -0,0 +1,23 @@ +{ + "args": [], + "builder": ":", + "env": { + "builder": ":", + "lib": "/nix/store/2vixb94v0hy2xc6p7mbnxxcyc095yyia-has-multi-out-lib", + "name": "has-multi-out", + "out": "/nix/store/55lwldka5nyxa08wnvlizyqw02ihy8ic-has-multi-out", + "outputs": "out lib", + "system": ":" + }, + "inputDrvs": {}, + "inputSrcs": [], + "outputs": { + "lib": { + "path": "/nix/store/2vixb94v0hy2xc6p7mbnxxcyc095yyia-has-multi-out-lib" + }, + "out": { + "path": "/nix/store/55lwldka5nyxa08wnvlizyqw02ihy8ic-has-multi-out" + } + }, + "system": ":" +} diff --git a/tvix/nix-compat/src/derivation/tests/derivation_tests/ss2p4wmxijn652haqyd7dckxwl4c7hxx-bar.drv b/tvix/nix-compat/src/derivation/tests/derivation_tests/ss2p4wmxijn652haqyd7dckxwl4c7hxx-bar.drv new file mode 100644 index 000000000000..559e93ed0ed6 --- /dev/null +++ b/tvix/nix-compat/src/derivation/tests/derivation_tests/ss2p4wmxijn652haqyd7dckxwl4c7hxx-bar.drv @@ -0,0 +1 @@ 
+Derive([("out","/nix/store/mp57d33657rf34lzvlbpfa1gjfv5gmpg-bar","r:sha1","0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33")],[],[],":",":",[],[("builder",":"),("name","bar"),("out","/nix/store/mp57d33657rf34lzvlbpfa1gjfv5gmpg-bar"),("outputHash","0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33"),("outputHashAlgo","sha1"),("outputHashMode","recursive"),("system",":")]) \ No newline at end of file diff --git a/tvix/nix-compat/src/derivation/tests/derivation_tests/ss2p4wmxijn652haqyd7dckxwl4c7hxx-bar.drv.json b/tvix/nix-compat/src/derivation/tests/derivation_tests/ss2p4wmxijn652haqyd7dckxwl4c7hxx-bar.drv.json new file mode 100644 index 000000000000..e297d271592f --- /dev/null +++ b/tvix/nix-compat/src/derivation/tests/derivation_tests/ss2p4wmxijn652haqyd7dckxwl4c7hxx-bar.drv.json @@ -0,0 +1,23 @@ +{ + "args": [], + "builder": ":", + "env": { + "builder": ":", + "name": "bar", + "out": "/nix/store/mp57d33657rf34lzvlbpfa1gjfv5gmpg-bar", + "outputHash": "0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33", + "outputHashAlgo": "sha1", + "outputHashMode": "recursive", + "system": ":" + }, + "inputDrvs": {}, + "inputSrcs": [], + "outputs": { + "out": { + "hash": "0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33", + "hashAlgo": "r:sha1", + "path": "/nix/store/mp57d33657rf34lzvlbpfa1gjfv5gmpg-bar" + } + }, + "system": ":" +} diff --git a/tvix/nix-compat/src/derivation/tests/mod.rs b/tvix/nix-compat/src/derivation/tests/mod.rs new file mode 100644 index 000000000000..5daa16da03cb --- /dev/null +++ b/tvix/nix-compat/src/derivation/tests/mod.rs @@ -0,0 +1,315 @@ +use crate::derivation::output::Output; +use crate::derivation::Derivation; +use crate::nixhash::NixHash; +use crate::store_path::StorePath; +use std::collections::BTreeSet; +use std::fs::File; +use std::io::Read; +use std::path::Path; +use test_case::test_case; +use test_generator::test_resources; + +const RESOURCES_PATHS: &str = "src/derivation/tests/derivation_tests"; + +fn read_file(path: &str) -> String { + let path = Path::new(path); + let mut file = File::open(path).unwrap(); + let mut data = String::new(); + + file.read_to_string(&mut data).unwrap(); + + data +} + +#[test_resources("src/derivation/tests/derivation_tests/*.drv")] +fn check_serizaliation(path_to_drv_file: &str) { + let data = read_file(&format!("{}.json", path_to_drv_file)); + let derivation: Derivation = serde_json::from_str(&data).expect("JSON was not well-formatted"); + + let mut serialized_derivation = String::new(); + derivation.serialize(&mut serialized_derivation).unwrap(); + + let expected = read_file(path_to_drv_file); + + assert_eq!(expected, serialized_derivation); +} + +#[test_resources("src/derivation/tests/derivation_tests/*.drv")] +fn validate(path_to_drv_file: &str) { + let data = read_file(&format!("{}.json", path_to_drv_file)); + let derivation: Derivation = serde_json::from_str(&data).expect("JSON was not well-formatted"); + + derivation + .validate(true) + .expect("derivation failed to validate") +} + +#[test_resources("src/derivation/tests/derivation_tests/*.drv")] +fn check_to_aterm_string(path_to_drv_file: &str) { + let data = read_file(&format!("{}.json", path_to_drv_file)); + let derivation: Derivation = serde_json::from_str(&data).expect("JSON was not well-formatted"); + + let expected = read_file(path_to_drv_file); + + assert_eq!(expected, derivation.to_aterm_string()); +} + +#[test_case("bar","0hm2f1psjpcwg8fijsmr4wwxrx59s092-bar.drv"; "fixed_sha256")] +#[test_case("foo", "4wvvbi4jwn0prsdxb7vs673qa5h9gr7x-foo.drv"; "simple-sha256")] +#[test_case("bar", 
"ss2p4wmxijn652haqyd7dckxwl4c7hxx-bar.drv"; "fixed-sha1")] +#[test_case("foo", "ch49594n9avinrf8ip0aslidkc4lxkqv-foo.drv"; "simple-sha1")] +#[test_case("has-multi-out", "h32dahq0bx5rp1krcdx3a53asj21jvhk-has-multi-out.drv"; "multiple-outputs")] +#[test_case("structured-attrs", "9lj1lkjm2ag622mh4h9rpy6j607an8g2-structured-attrs.drv"; "structured-attrs")] +#[test_case("unicode", "52a9id8hx688hvlnz4d1n25ml1jdykz0-unicode.drv"; "unicode")] +fn derivation_path(name: &str, expected_path: &str) { + let data = read_file(&format!("{}/{}.json", RESOURCES_PATHS, expected_path)); + let derivation: Derivation = serde_json::from_str(&data).expect("JSON was not well-formatted"); + + assert_eq!( + derivation.calculate_derivation_path(name).unwrap(), + StorePath::from_string(expected_path).unwrap() + ); +} + +/// This trims all output paths from a Derivation struct, +/// by setting outputs[$outputName].path and environment[$outputName] to the empty string. +fn derivation_with_trimmed_output_paths(derivation: &Derivation) -> Derivation { + let mut trimmed_env = derivation.environment.clone(); + let mut trimmed_outputs = derivation.outputs.clone(); + + for (output_name, output) in &derivation.outputs { + trimmed_env.insert(output_name.clone(), "".to_string()); + assert!(trimmed_outputs.contains_key(output_name)); + trimmed_outputs.insert( + output_name.to_string(), + Output { + path: "".to_string(), + ..output.clone() + }, + ); + } + + // replace environment and outputs with the trimmed variants + Derivation { + environment: trimmed_env, + outputs: trimmed_outputs, + ..derivation.clone() + } +} + +#[test_case("0hm2f1psjpcwg8fijsmr4wwxrx59s092-bar.drv", "sha256:724f3e3634fce4cbbbd3483287b8798588e80280660b9a63fd13a1bc90485b33"; "fixed_sha256")] +#[test_case("ss2p4wmxijn652haqyd7dckxwl4c7hxx-bar.drv", "sha256:c79aebd0ce3269393d4a1fde2cbd1d975d879b40f0bf40a48f550edc107fd5df";"fixed-sha1")] +fn derivation_or_fod_hash(drv_path: &str, expected_nix_hash_string: &str) { + // read in the fixture + let data = read_file(&format!("{}/{}.json", RESOURCES_PATHS, drv_path)); + let drv: Derivation = serde_json::from_str(&data).expect("must deserialize"); + + let actual = drv.derivation_or_fod_hash(|_| panic!("must not be called")); + + assert_eq!(expected_nix_hash_string, actual.to_nix_hash_string()); +} + +#[test_case("bar","0hm2f1psjpcwg8fijsmr4wwxrx59s092-bar.drv"; "fixed_sha256")] +#[test_case("foo", "4wvvbi4jwn0prsdxb7vs673qa5h9gr7x-foo.drv"; "simple-sha256")] +#[test_case("bar", "ss2p4wmxijn652haqyd7dckxwl4c7hxx-bar.drv"; "fixed-sha1")] +#[test_case("foo", "ch49594n9avinrf8ip0aslidkc4lxkqv-foo.drv"; "simple-sha1")] +#[test_case("has-multi-out", "h32dahq0bx5rp1krcdx3a53asj21jvhk-has-multi-out.drv"; "multiple-outputs")] +#[test_case("structured-attrs", "9lj1lkjm2ag622mh4h9rpy6j607an8g2-structured-attrs.drv"; "structured-attrs")] +#[test_case("unicode", "52a9id8hx688hvlnz4d1n25ml1jdykz0-unicode.drv"; "unicode")] +fn output_paths(name: &str, drv_path: &str) { + // read in the fixture + let data = read_file(&format!("{}/{}.json", RESOURCES_PATHS, drv_path)); + let expected_derivation: Derivation = serde_json::from_str(&data).expect("must deserialize"); + + let mut derivation = derivation_with_trimmed_output_paths(&expected_derivation); + + // calculate the derivation_or_fod_hash of derivation + // We don't expect the lookup function to be called for most derivations. 
+ let calculated_derivation_or_fod_hash = derivation.derivation_or_fod_hash(|parent_drv_path| { + // 4wvvbi4jwn0prsdxb7vs673qa5h9gr7x-foo.drv may lookup /nix/store/0hm2f1psjpcwg8fijsmr4wwxrx59s092-bar.drv + // ch49594n9avinrf8ip0aslidkc4lxkqv-foo.drv may lookup /nix/store/ss2p4wmxijn652haqyd7dckxwl4c7hxx-bar.drv + if name == "foo" + && ((drv_path == "4wvvbi4jwn0prsdxb7vs673qa5h9gr7x-foo.drv" + && parent_drv_path == "/nix/store/0hm2f1psjpcwg8fijsmr4wwxrx59s092-bar.drv") + || (drv_path == "ch49594n9avinrf8ip0aslidkc4lxkqv-foo.drv" + && parent_drv_path == "/nix/store/ss2p4wmxijn652haqyd7dckxwl4c7hxx-bar.drv")) + { + // do the lookup, by reading in the fixture of the requested + // drv_name, and calculating its drv replacement (on the non-stripped version) + // In a real-world scenario you would have already done this during construction. + + let data = read_file(&format!( + "{}/{}.json", + RESOURCES_PATHS, + Path::new(parent_drv_path) + .file_name() + .unwrap() + .to_string_lossy() + )); + + let drv: Derivation = serde_json::from_str(&data).expect("must deserialize"); + + // calculate derivation_or_fod_hash for each parent. + // This may not trigger subsequent requests, as both parents are FOD. + drv.derivation_or_fod_hash(|_| panic!("must not lookup")) + } else { + // we only expect this to be called in the "foo" testcase, for the "bar derivations" + panic!("may only be called for foo testcase on bar derivations"); + } + }); + + derivation + .calculate_output_paths(name, &calculated_derivation_or_fod_hash) + .unwrap(); + + // The derivation should now look like it was before + assert_eq!(expected_derivation, derivation); +} + +/// Exercises the output path calculation functions like a constructing client +/// (an implementation of builtins.derivation) would do: +/// +/// ```nix +/// rec { +/// bar = builtins.derivation { +/// name = "bar"; +/// builder = ":"; +/// system = ":"; +/// outputHash = "08813cbee9903c62be4c5027726a418a300da4500b2d369d3af9286f4815ceba"; +/// outputHashAlgo = "sha256"; +/// outputHashMode = "recursive"; +/// }; +/// +/// foo = builtins.derivation { +/// name = "foo"; +/// builder = ":"; +/// system = ":"; +/// inherit bar; +/// }; +/// } +/// ``` +/// It first assembles the bar derivation, does the output path calculation on +/// it, then continues with the foo derivation. +/// +/// The code ensures the resulting Derivations match our fixtures. 
+#[test] +fn output_path_construction() { + // create the bar derivation + let mut bar_drv = Derivation { + builder: ":".to_string(), + system: ":".to_string(), + ..Default::default() + }; + + // assemble bar env + let bar_env = &mut bar_drv.environment; + bar_env.insert("builder".to_string(), ":".to_string()); + bar_env.insert("name".to_string(), "bar".to_string()); + bar_env.insert("out".to_string(), "".to_string()); // will be calculated + bar_env.insert( + "outputHash".to_string(), + "08813cbee9903c62be4c5027726a418a300da4500b2d369d3af9286f4815ceba".to_string(), + ); + bar_env.insert("outputHashAlgo".to_string(), "sha256".to_string()); + bar_env.insert("outputHashMode".to_string(), "recursive".to_string()); + bar_env.insert("system".to_string(), ":".to_string()); + + // assemble bar outputs + bar_drv.outputs.insert( + "out".to_string(), + Output { + path: "".to_string(), // will be calculated + hash_with_mode: Some(crate::nixhash::NixHashWithMode::Recursive(NixHash { + digest: data_encoding::HEXLOWER + .decode( + "08813cbee9903c62be4c5027726a418a300da4500b2d369d3af9286f4815ceba" + .as_bytes(), + ) + .unwrap(), + algo: crate::nixhash::HashAlgo::Sha256, + })), + }, + ); + + // calculate bar output paths + let bar_calc_result = bar_drv.calculate_output_paths( + "bar", + &bar_drv.derivation_or_fod_hash(|_| panic!("is FOD, should not lookup")), + ); + assert!(bar_calc_result.is_ok()); + + // ensure it matches our bar fixture + let bar_data = read_file(&format!( + "{}/{}.json", + RESOURCES_PATHS, "0hm2f1psjpcwg8fijsmr4wwxrx59s092-bar.drv" + )); + let bar_drv_expected: Derivation = serde_json::from_str(&bar_data).expect("must deserialize"); + assert_eq!(bar_drv_expected, bar_drv); + + // now construct foo, which requires bar_drv + // Note how we refer to the output path, drv name and replacement_str (with calculated output paths) of bar. 
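+    // bar is fixed-output, so its derivation_or_fod_hash is computed from its
+    // own outputHash alone and the lookup closure is never invoked (it panics).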
+ let bar_output_path = &bar_drv.outputs.get("out").expect("must exist").path; + let bar_drv_derivation_or_fod_hash = + bar_drv.derivation_or_fod_hash(|_| panic!("is FOD, should not lookup")); + + let bar_drv_path = bar_drv + .calculate_derivation_path("bar") + .expect("must succeed"); + + // create foo derivation + let mut foo_drv = Derivation { + builder: ":".to_string(), + system: ":".to_string(), + ..Default::default() + }; + + // assemble foo env + let foo_env = &mut foo_drv.environment; + foo_env.insert("bar".to_string(), bar_output_path.to_string()); + foo_env.insert("builder".to_string(), ":".to_string()); + foo_env.insert("name".to_string(), "foo".to_string()); + foo_env.insert("out".to_string(), "".to_string()); // will be calculated + foo_env.insert("system".to_string(), ":".to_string()); + + // asssemble foo outputs + foo_drv.outputs.insert( + "out".to_string(), + Output { + path: "".to_string(), // will be calculated + hash_with_mode: None, + }, + ); + + // assemble foo input_derivations + foo_drv.input_derivations.insert( + bar_drv_path.to_absolute_path(), + BTreeSet::from(["out".to_string()]), + ); + + // calculate foo output paths + let foo_calc_result = foo_drv.calculate_output_paths( + "foo", + &foo_drv.derivation_or_fod_hash(|drv_path| { + if drv_path != "/nix/store/0hm2f1psjpcwg8fijsmr4wwxrx59s092-bar.drv" { + panic!("lookup called with unexpected drv_path: {}", drv_path); + } + bar_drv_derivation_or_fod_hash.clone() + }), + ); + assert!(foo_calc_result.is_ok()); + + // ensure it matches our foo fixture + let foo_data = read_file(&format!( + "{}/{}.json", + RESOURCES_PATHS, "4wvvbi4jwn0prsdxb7vs673qa5h9gr7x-foo.drv", + )); + let foo_drv_expected: Derivation = serde_json::from_str(&foo_data).expect("must deserialize"); + assert_eq!(foo_drv_expected, foo_drv); + + assert_eq!( + StorePath::from_string("4wvvbi4jwn0prsdxb7vs673qa5h9gr7x-foo.drv").expect("must succeed"), + foo_drv + .calculate_derivation_path("foo") + .expect("must succeed") + ); +} diff --git a/tvix/nix-compat/src/derivation/validate.rs b/tvix/nix-compat/src/derivation/validate.rs new file mode 100644 index 000000000000..d8dc24a92ae1 --- /dev/null +++ b/tvix/nix-compat/src/derivation/validate.rs @@ -0,0 +1,129 @@ +use crate::derivation::{Derivation, DerivationError}; +use crate::store_path::StorePath; + +impl Derivation { + /// validate ensures a Derivation struct is properly populated, + /// and returns a [DerivationError] if not. + /// + /// if `validate_output_paths` is set to false, the output paths are + /// excluded from validation. + /// + /// This is helpful to validate struct population before invoking + /// [Derivation::calculate_output_paths]. + pub fn validate(&self, validate_output_paths: bool) -> Result<(), DerivationError> { + // Ensure the number of outputs is > 1 + if self.outputs.is_empty() { + return Err(DerivationError::NoOutputs()); + } + + // Validate all outputs + for (output_name, output) in &self.outputs { + // empty output names are invalid. + // + // `drv` is an invalid output name too, as this would cause + // a `builtins.derivation` call to return an attrset with a + // `drvPath` key (which already exists) and has a different + // meaning. + // + // Other output names that don't match the name restrictions from + // [StorePath] will fail the [StorePath::validate_name] check. 
+ if output_name.is_empty() + || output_name == "drv" + || StorePath::validate_name(output_name).is_err() + { + return Err(DerivationError::InvalidOutputName(output_name.to_string())); + } + + if output.is_fixed() { + if self.outputs.len() != 1 { + return Err(DerivationError::MoreThanOneOutputButFixed()); + } + if output_name != "out" { + return Err(DerivationError::InvalidOutputNameForFixed( + output_name.to_string(), + )); + } + + break; + } + + if let Err(e) = output.validate(validate_output_paths) { + return Err(DerivationError::InvalidOutput(output_name.to_string(), e)); + } + } + + // Validate all input_derivations + for (input_derivation_path, output_names) in &self.input_derivations { + // Validate input_derivation_path + if let Err(e) = StorePath::from_absolute_path(input_derivation_path) { + return Err(DerivationError::InvalidInputDerivationPath( + input_derivation_path.to_string(), + e, + )); + } + + if !input_derivation_path.ends_with(".drv") { + return Err(DerivationError::InvalidInputDerivationPrefix( + input_derivation_path.to_string(), + )); + } + + if output_names.is_empty() { + return Err(DerivationError::EmptyInputDerivationOutputNames( + input_derivation_path.to_string(), + )); + } + + for output_name in output_names.iter() { + // empty output names are invalid. + // + // `drv` is an invalid output name too, as this would cause + // a `builtins.derivation` call to return an attrset with a + // `drvPath` key (which already exists) and has a different + // meaning. + // + // Other output names that don't match the name restrictions from + // [StorePath] will fail the [StorePath::validate_name] check. + if output_name.is_empty() + || output_name == "drv" + || StorePath::validate_name(output_name).is_err() + { + return Err(DerivationError::InvalidInputDerivationOutputName( + input_derivation_path.to_string(), + output_name.to_string(), + )); + } + } + } + + // Validate all input_sources + for input_source in self.input_sources.iter() { + if let Err(e) = StorePath::from_absolute_path(input_source) { + return Err(DerivationError::InvalidInputSourcesPath( + input_source.to_string(), + e, + )); + } + } + + // validate platform + if self.system.is_empty() { + return Err(DerivationError::InvalidPlatform(self.system.to_string())); + } + + // validate builder + if self.builder.is_empty() { + return Err(DerivationError::InvalidBuilder(self.builder.to_string())); + } + + // validate env, none of the keys may be empty. + // We skip the `name` validation seen in go-nix. + for k in self.environment.keys() { + if k.is_empty() { + return Err(DerivationError::InvalidEnvironmentKey(k.to_string())); + } + } + + Ok(()) + } +} diff --git a/tvix/nix-compat/src/derivation/write.rs b/tvix/nix-compat/src/derivation/write.rs new file mode 100644 index 000000000000..52166294e078 --- /dev/null +++ b/tvix/nix-compat/src/derivation/write.rs @@ -0,0 +1,190 @@ +//! This module implements the serialisation of derivations into the +//! [ATerm][] format used by C++ Nix. +//! +//! 
[ATerm]: http://program-transformation.org/Tools/ATermFormat.html + +use crate::derivation::output::Output; +use crate::derivation::string_escape::escape_string; +use std::collections::BTreeSet; +use std::{collections::BTreeMap, fmt, fmt::Write}; + +pub const DERIVATION_PREFIX: &str = "Derive"; +pub const PAREN_OPEN: char = '('; +pub const PAREN_CLOSE: char = ')'; +pub const BRACKET_OPEN: char = '['; +pub const BRACKET_CLOSE: char = ']'; +pub const COMMA: char = ','; +pub const QUOTE: char = '"'; + +fn write_array_elements( + writer: &mut impl Write, + quote: bool, + open: &str, + closing: &str, + elements: Vec<&str>, +) -> Result<(), fmt::Error> { + writer.write_str(open)?; + + for (index, element) in elements.iter().enumerate() { + if index > 0 { + writer.write_char(COMMA)?; + } + + if quote { + writer.write_char(QUOTE)?; + } + + writer.write_str(element)?; + + if quote { + writer.write_char(QUOTE)?; + } + } + + writer.write_str(closing)?; + + Ok(()) +} + +pub fn write_outputs( + writer: &mut impl Write, + outputs: &BTreeMap<String, Output>, +) -> Result<(), fmt::Error> { + writer.write_char(BRACKET_OPEN)?; + for (ii, (output_name, output)) in outputs.iter().enumerate() { + if ii > 0 { + writer.write_char(COMMA)?; + } + + let mut elements: Vec<&str> = vec![output_name, &output.path]; + + let (e2, e3) = match &output.hash_with_mode { + Some(hash) => match hash { + crate::nixhash::NixHashWithMode::Flat(h) => ( + h.algo.to_string(), + data_encoding::HEXLOWER.encode(&h.digest), + ), + crate::nixhash::NixHashWithMode::Recursive(h) => ( + format!("r:{}", h.algo), + data_encoding::HEXLOWER.encode(&h.digest), + ), + }, + None => ("".to_string(), "".to_string()), + }; + + elements.push(&e2); + elements.push(&e3); + + write_array_elements( + writer, + true, + &PAREN_OPEN.to_string(), + &PAREN_CLOSE.to_string(), + elements, + )? 
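+        // Each output is thus written as a ("<name>","<path>","<hashAlgo>","<digest>")
+        // tuple; for outputs without a fixed hash the last two fields stay empty.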
+ } + writer.write_char(BRACKET_CLOSE)?; + + Ok(()) +} + +pub fn write_input_derivations( + writer: &mut impl Write, + input_derivations: &BTreeMap<String, BTreeSet<String>>, +) -> Result<(), fmt::Error> { + writer.write_char(COMMA)?; + writer.write_char(BRACKET_OPEN)?; + + for (ii, (input_derivation_path, input_derivation)) in input_derivations.iter().enumerate() { + if ii > 0 { + writer.write_char(COMMA)?; + } + + writer.write_char(PAREN_OPEN)?; + writer.write_char(QUOTE)?; + writer.write_str(input_derivation_path.as_str())?; + writer.write_char(QUOTE)?; + writer.write_char(COMMA)?; + + write_array_elements( + writer, + true, + &BRACKET_OPEN.to_string(), + &BRACKET_CLOSE.to_string(), + input_derivation.iter().map(|s| &**s).collect(), + )?; + + writer.write_char(PAREN_CLOSE)?; + } + + writer.write_char(BRACKET_CLOSE)?; + + Ok(()) +} + +pub fn write_input_sources( + writer: &mut impl Write, + input_sources: &BTreeSet<String>, +) -> Result<(), fmt::Error> { + writer.write_char(COMMA)?; + + write_array_elements( + writer, + true, + &BRACKET_OPEN.to_string(), + &BRACKET_CLOSE.to_string(), + input_sources.iter().map(|s| &**s).collect(), + )?; + + Ok(()) +} + +pub fn write_system(writer: &mut impl Write, platform: &str) -> Result<(), fmt::Error> { + writer.write_char(COMMA)?; + writer.write_str(escape_string(platform).as_str())?; + Ok(()) +} + +pub fn write_builder(writer: &mut impl Write, builder: &str) -> Result<(), fmt::Error> { + writer.write_char(COMMA)?; + writer.write_str(escape_string(builder).as_str())?; + Ok(()) +} +pub fn write_arguments(writer: &mut impl Write, arguments: &[String]) -> Result<(), fmt::Error> { + writer.write_char(COMMA)?; + write_array_elements( + writer, + true, + &BRACKET_OPEN.to_string(), + &BRACKET_CLOSE.to_string(), + arguments.iter().map(|s| &**s).collect(), + )?; + + Ok(()) +} + +pub fn write_enviroment( + writer: &mut impl Write, + environment: &BTreeMap<String, String>, +) -> Result<(), fmt::Error> { + writer.write_char(COMMA)?; + writer.write_char(BRACKET_OPEN)?; + + for (ii, (key, environment)) in environment.iter().enumerate() { + if ii > 0 { + writer.write_char(COMMA)?; + } + + write_array_elements( + writer, + false, + &PAREN_OPEN.to_string(), + &PAREN_CLOSE.to_string(), + vec![&escape_string(key), &escape_string(environment)], + )?; + } + + writer.write_char(BRACKET_CLOSE)?; + + Ok(()) +} diff --git a/tvix/nix-compat/src/lib.rs b/tvix/nix-compat/src/lib.rs new file mode 100644 index 000000000000..37d79f38fb37 --- /dev/null +++ b/tvix/nix-compat/src/lib.rs @@ -0,0 +1,5 @@ +pub mod derivation; +pub mod nar; +pub mod nixbase32; +pub mod nixhash; +pub mod store_path; diff --git a/tvix/nix-compat/src/nar/mod.rs b/tvix/nix-compat/src/nar/mod.rs new file mode 100644 index 000000000000..d3baa817825a --- /dev/null +++ b/tvix/nix-compat/src/nar/mod.rs @@ -0,0 +1 @@ +pub mod writer; diff --git a/tvix/nix-compat/src/nar/writer/mod.rs b/tvix/nix-compat/src/nar/writer/mod.rs new file mode 100644 index 000000000000..f24b69883876 --- /dev/null +++ b/tvix/nix-compat/src/nar/writer/mod.rs @@ -0,0 +1,219 @@ +//! Implements an interface for writing the Nix archive format (NAR). +//! +//! NAR files (and their hashed representations) are used in C++ Nix for +//! addressing fixed-output derivations and a variety of other things. +//! +//! NAR files can be output to any type that implements [`Write`], and content +//! can be read from any type that implementes [`BufRead`]. +//! +//! Writing a single file might look like this: +//! +//! ```rust +//! 
# use std::io::BufReader; +//! # let some_file: Vec<u8> = vec![0, 1, 2, 3, 4]; +//! +//! // Output location to write the NAR to. +//! let mut sink: Vec<u8> = Vec::new(); +//! +//! // Instantiate writer for this output location. +//! let mut nar = nix_compat::nar::writer::open(&mut sink)?; +//! +//! // Acquire metadata for the single file to output, and pass it in a +//! // `BufRead`-implementing type. +//! +//! let executable = false; +//! let size = some_file.len() as u64; +//! let mut reader = BufReader::new(some_file.as_slice()); +//! nar.file(executable, size, &mut reader)?; +//! # Ok::<(), std::io::Error>(()) +//! ``` + +use std::io::{ + self, BufRead, + ErrorKind::{InvalidInput, UnexpectedEof}, + Write, +}; + +mod wire; + +/// Convenience type alias for types implementing [`Write`]. +pub type Writer<'a> = dyn Write + 'a; + +/// Create a new NAR, writing the output to the specified writer. +pub fn open<'a, 'w: 'a>(writer: &'a mut Writer<'w>) -> io::Result<Node<'a, 'w>> { + let mut node = Node { writer }; + node.write(&wire::TOK_NAR)?; + Ok(node) +} + +/// Single node in a NAR file. +/// +/// A NAR can be thought of as a tree of nodes represented by this type. Each +/// node can be a file, a symlink or a directory containing other nodes. +pub struct Node<'a, 'w: 'a> { + writer: &'a mut Writer<'w>, +} + +impl<'a, 'w> Node<'a, 'w> { + fn write(&mut self, data: &[u8]) -> io::Result<()> { + self.writer.write_all(data) + } + + fn pad(&mut self, n: u64) -> io::Result<()> { + match (n & 7) as usize { + 0 => Ok(()), + n => self.write(&[0; 8][n..]), + } + } + + /// Make this node a symlink. + pub fn symlink(mut self, target: &str) -> io::Result<()> { + debug_assert!( + target.len() <= wire::MAX_TARGET_LEN, + "target.len() > {}", + wire::MAX_TARGET_LEN + ); + debug_assert!( + !target.contains('\0'), + "invalid target characters: {target:?}" + ); + debug_assert!(!target.is_empty(), "empty target"); + + self.write(&wire::TOK_SYM)?; + self.write(&target.len().to_le_bytes())?; + self.write(target.as_bytes())?; + self.pad(target.len() as u64)?; + self.write(&wire::TOK_PAR)?; + Ok(()) + } + + /// Make this node a single file. + pub fn file(mut self, executable: bool, size: u64, reader: &mut dyn BufRead) -> io::Result<()> { + self.write(if executable { + &wire::TOK_EXE + } else { + &wire::TOK_REG + })?; + + self.write(&size.to_le_bytes())?; + + let mut need = size; + while need != 0 { + let data = reader.fill_buf()?; + + if data.is_empty() { + return Err(UnexpectedEof.into()); + } + + let n = need.min(data.len() as u64) as usize; + self.write(&data[..n])?; + + need -= n as u64; + reader.consume(n); + } + + // bail if there's still data left in the passed reader. + // This uses the same code as [BufRead::has_data_left] (unstable). + if reader.fill_buf().map(|b| !b.is_empty())? { + return Err(io::Error::new( + InvalidInput, + "reader contained more data than specified size", + )); + } + + self.pad(size)?; + self.write(&wire::TOK_PAR)?; + + Ok(()) + } + + /// Make this node a directory, the content of which is set using the + /// resulting [`Directory`] value. + pub fn directory(mut self) -> io::Result<Directory<'a, 'w>> { + self.write(&wire::TOK_DIR)?; + Ok(Directory::new(self)) + } +} + +#[cfg(debug_assertions)] +type Name = String; +#[cfg(not(debug_assertions))] +type Name = (); + +fn into_name(_name: &str) -> Name { + #[cfg(debug_assertions)] + _name.to_owned() +} + +/// Content of a NAR node that represents a directory. 
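+///
+/// For illustration, writing a directory containing a single file might look
+/// like this (a minimal sketch, mirroring the module-level file example):
+///
+/// ```rust
+/// # use std::io::BufReader;
+/// let contents: Vec<u8> = vec![1, 2, 3];
+///
+/// // Output location to write the NAR to.
+/// let mut sink: Vec<u8> = Vec::new();
+/// let nar = nix_compat::nar::writer::open(&mut sink)?;
+///
+/// // Turn the root node into a directory and add one file entry to it.
+/// let mut dir = nar.directory()?;
+/// let mut reader = BufReader::new(contents.as_slice());
+/// dir.entry("data")?.file(false, contents.len() as u64, &mut reader)?;
+///
+/// // Terminate the directory; without this the NAR would be invalid.
+/// dir.close()?;
+/// # Ok::<(), std::io::Error>(())
+/// ```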
+pub struct Directory<'a, 'w> { + node: Node<'a, 'w>, + prev_name: Option<Name>, +} + +impl<'a, 'w> Directory<'a, 'w> { + fn new(node: Node<'a, 'w>) -> Self { + Self { + node, + prev_name: None, + } + } + + /// Add an entry to the directory. + /// + /// The entry is simply another [`Node`], which can then be filled like the + /// root of a NAR (including, of course, by nesting directories). + pub fn entry(&mut self, name: &str) -> io::Result<Node<'_, 'w>> { + debug_assert!( + name.len() <= wire::MAX_NAME_LEN, + "name.len() > {}", + wire::MAX_NAME_LEN + ); + debug_assert!(!["", ".", ".."].contains(&name), "invalid name: {name:?}"); + debug_assert!( + !name.contains(['/', '\0']), + "invalid name characters: {name:?}" + ); + + match self.prev_name { + None => { + self.prev_name = Some(into_name(name)); + } + Some(ref mut _prev_name) => { + #[cfg(debug_assertions)] + { + assert!( + &**_prev_name < name, + "misordered names: {_prev_name:?} >= {name:?}" + ); + _prev_name.clear(); + _prev_name.push_str(name); + } + self.node.write(&wire::TOK_PAR)?; + } + } + + self.node.write(&wire::TOK_ENT)?; + self.node.write(&name.len().to_le_bytes())?; + self.node.write(name.as_bytes())?; + self.node.pad(name.len() as u64)?; + self.node.write(&wire::TOK_NOD)?; + + Ok(Node { + writer: &mut *self.node.writer, + }) + } + + /// Close a directory and write terminators for the directory to the NAR. + /// + /// **Important:** This *must* be called when all entries have been written + /// in a directory, otherwise the resulting NAR file will be invalid. + pub fn close(mut self) -> io::Result<()> { + if self.prev_name.is_some() { + self.node.write(&wire::TOK_PAR)?; + } + + self.node.write(&wire::TOK_PAR)?; + Ok(()) + } +} diff --git a/tvix/nix-compat/src/nar/writer/wire.rs b/tvix/nix-compat/src/nar/writer/wire.rs new file mode 100644 index 000000000000..98581ae3aa7c --- /dev/null +++ b/tvix/nix-compat/src/nar/writer/wire.rs @@ -0,0 +1,46 @@ +pub const MAX_NAME_LEN: usize = 255; +pub const MAX_TARGET_LEN: usize = 4095; + +#[cfg(test)] +fn token(xs: &[&str]) -> Vec<u8> { + let mut out = vec![]; + for x in xs { + let len = x.len() as u64; + out.extend_from_slice(&len.to_le_bytes()); + out.extend_from_slice(x.as_bytes()); + + let n = x.len() & 7; + if n != 0 { + const ZERO: [u8; 8] = [0; 8]; + out.extend_from_slice(&ZERO[n..]); + } + } + out +} + +pub const TOK_NAR: [u8; 56] = *b"\x0d\0\0\0\0\0\0\0nix-archive-1\0\0\0\x01\0\0\0\0\0\0\0(\0\0\0\0\0\0\0\x04\0\0\0\0\0\0\0type\0\0\0\0"; +pub const TOK_REG: [u8; 32] = *b"\x07\0\0\0\0\0\0\0regular\0\x08\0\0\0\0\0\0\0contents"; +pub const TOK_EXE: [u8; 64] = *b"\x07\0\0\0\0\0\0\0regular\0\x0a\0\0\0\0\0\0\0executable\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x08\0\0\0\0\0\0\0contents"; +pub const TOK_SYM: [u8; 32] = *b"\x07\0\0\0\0\0\0\0symlink\0\x06\0\0\0\0\0\0\0target\0\0"; +pub const TOK_DIR: [u8; 24] = *b"\x09\0\0\0\0\0\0\0directory\0\0\0\0\0\0\0"; +pub const TOK_ENT: [u8; 48] = *b"\x05\0\0\0\0\0\0\0entry\0\0\0\x01\0\0\0\0\0\0\0(\0\0\0\0\0\0\0\x04\0\0\0\0\0\0\0name\0\0\0\0"; +pub const TOK_NOD: [u8; 48] = *b"\x04\0\0\0\0\0\0\0node\0\0\0\0\x01\0\0\0\0\0\0\0(\0\0\0\0\0\0\0\x04\0\0\0\0\0\0\0type\0\0\0\0"; +pub const TOK_PAR: [u8; 16] = *b"\x01\0\0\0\0\0\0\0)\0\0\0\0\0\0\0"; + +#[test] +fn tokens() { + let cases: &[(&[u8], &[&str])] = &[ + (&TOK_NAR, &["nix-archive-1", "(", "type"]), + (&TOK_REG, &["regular", "contents"]), + (&TOK_EXE, &["regular", "executable", "", "contents"]), + (&TOK_SYM, &["symlink", "target"]), + (&TOK_DIR, &["directory"]), + (&TOK_ENT, &["entry", "(", "name"]), + 
(&TOK_NOD, &["node", "(", "type"]), + (&TOK_PAR, &[")"]), + ]; + + for &(tok, xs) in cases { + assert_eq!(tok, token(xs)); + } +} diff --git a/tvix/nix-compat/src/nixbase32.rs b/tvix/nix-compat/src/nixbase32.rs new file mode 100644 index 000000000000..3d1c276c63bb --- /dev/null +++ b/tvix/nix-compat/src/nixbase32.rs @@ -0,0 +1,167 @@ +//! Implements the slightly odd "base32" encoding that's used in Nix. +//! +//! Nix uses a custom alphabet. Contrary to other implementations (RFC4648), +//! encoding to "nix base32" doesn't use any padding, and reads in characters +//! in reverse order. +//! +//! This is also the main reason why we can't use `data_encoding::Encoding` - +//! it gets things wrong if there normally would be a need for padding. + +use std::fmt::Write; + +use thiserror::Error; + +const ALPHABET: &[u8; 32] = b"0123456789abcdfghijklmnpqrsvwxyz"; + +/// Errors that can occur while decoding nixbase32-encoded data. +#[derive(Debug, Eq, PartialEq, Error)] +pub enum Nixbase32DecodeError { + #[error("character {0:x} not in alphabet")] + CharacterNotInAlphabet(u8), + #[error("nonzero carry")] + NonzeroCarry(), +} + +/// Returns encoded input +pub fn encode(input: &[u8]) -> String { + let output_len = encode_len(input.len()); + let mut output = String::with_capacity(output_len); + + if output_len > 0 { + for n in (0..=output_len - 1).rev() { + let b = n * 5; // bit offset within the entire input + let i = b / 8; // input byte index + let j = b % 8; // bit offset within that input byte + + let mut c = input[i] >> j; + if i + 1 < input.len() { + // we want to right shift, and discard shifted out bits (unchecked) + // To do this without panicing, we need to do the shifting in u16 + // and convert back to u8 afterwards. + c |= ((input[i + 1] as u16) << (8 - j as u16)) as u8 + } + + output + .write_char(ALPHABET[(c & 0x1f) as usize] as char) + .unwrap(); + } + } + + output +} + +/// This maps a nixbase32-encoded character to its binary representation, which +/// is also the index of the character in the alphabet. +fn decode_char(encoded_char: &u8) -> Option<u8> { + Some(match encoded_char { + b'0'..=b'9' => encoded_char - b'0', + b'a'..=b'd' => encoded_char - b'a' + 10_u8, + b'f'..=b'n' => encoded_char - b'f' + 14_u8, + b'p'..=b's' => encoded_char - b'p' + 23_u8, + b'v'..=b'z' => encoded_char - b'v' + 27_u8, + _ => return None, + }) +} + +/// Returns decoded input +pub fn decode(input: &[u8]) -> Result<Vec<u8>, Nixbase32DecodeError> { + let output_len = decode_len(input.len()); + let mut output: Vec<u8> = vec![0x00; output_len]; + + // loop over all characters in reverse, and keep the iteration count in n. + for (n, c) in input.iter().rev().enumerate() { + match decode_char(c) { + None => return Err(Nixbase32DecodeError::CharacterNotInAlphabet(*c)), + Some(c_decoded) => { + let b = n * 5; + let i = b / 8; + let j = b % 8; + + let val = (c_decoded as u16).rotate_left(j as u32); + output[i] |= (val & 0x00ff) as u8; + let carry = ((val & 0xff00) >> 8) as u8; + + // if we're at the end of dst… + if i == output_len - 1 { + // but have a nonzero carry, the encoding is invalid. + if carry != 0 { + return Err(Nixbase32DecodeError::NonzeroCarry()); + } + } else { + output[i + 1] |= carry; + } + } + } + } + + Ok(output) +} + +/// Returns the decoded length of an input of length len. 
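+///
+/// For example, the 32-character digest part of a store path decodes to
+/// 20 bytes:
+///
+/// ```rust
+/// assert_eq!(nix_compat::nixbase32::decode_len(32), 20);
+/// ```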
+pub fn decode_len(len: usize) -> usize { + (len * 5) / 8 +} + +/// Returns the encoded length of an input of length len +pub fn encode_len(len: usize) -> usize { + if len == 0 { + return 0; + } + (len * 8 - 1) / 5 + 1 +} + +#[cfg(test)] +mod tests { + use test_case::test_case; + + #[test_case("", vec![] ; "empty bytes")] + #[test_case("0z", vec![0x1f]; "one byte")] + #[test_case("00bgd045z0d4icpbc2yyz4gx48ak44la", vec![ + 0x8a, 0x12, 0x32, 0x15, 0x22, 0xfd, 0x91, 0xef, 0xbd, 0x60, 0xeb, 0xb2, 0x48, 0x1a, + 0xf8, 0x85, 0x80, 0xf6, 0x16, 0x00]; "store path")] + #[test_case("0c5b8vw40dy178xlpddw65q9gf1h2186jcc3p4swinwggbllv8mk", vec![ + 0xb3, 0xa2, 0x4d, 0xe9, 0x7a, 0x8f, 0xdb, 0xc8, 0x35, 0xb9, 0x83, 0x31, 0x69, 0x50, 0x10, 0x30, + 0xb8, 0x97, 0x70, 0x31, 0xbc, 0xb5, 0x4b, 0x3b, 0x3a, 0xc1, 0x37, 0x40, 0xf8, 0x46, 0xab, 0x30, + ]; "sha256")] + fn encode(enc: &str, dec: Vec<u8>) { + assert_eq!(enc, super::encode(&dec)); + } + + #[test_case("", Some(vec![]) ; "empty bytes")] + #[test_case("0z", Some(vec![0x1f]); "one byte")] + #[test_case("00bgd045z0d4icpbc2yyz4gx48ak44la", Some(vec![ + 0x8a, 0x12, 0x32, 0x15, 0x22, 0xfd, 0x91, 0xef, 0xbd, 0x60, 0xeb, 0xb2, 0x48, 0x1a, + 0xf8, 0x85, 0x80, 0xf6, 0x16, 0x00]); "store path")] + #[test_case("0c5b8vw40dy178xlpddw65q9gf1h2186jcc3p4swinwggbllv8mk", Some(vec![ + 0xb3, 0xa2, 0x4d, 0xe9, 0x7a, 0x8f, 0xdb, 0xc8, 0x35, 0xb9, 0x83, 0x31, 0x69, 0x50, 0x10, 0x30, + 0xb8, 0x97, 0x70, 0x31, 0xbc, 0xb5, 0x4b, 0x3b, 0x3a, 0xc1, 0x37, 0x40, 0xf8, 0x46, 0xab, 0x30, + ]); "sha256")] + // this is invalid encoding, because it encodes 10 1-bytes, so the carry + // would be 2 1-bytes + #[test_case("zz", None; "invalid encoding-1")] + // this is an even more specific example - it'd decode as 00000000 11 + #[test_case("c0", None; "invalid encoding-2")] + + fn decode(enc: &str, dec: Option<Vec<u8>>) { + match dec { + Some(dec) => { + // The decode needs to match what's passed in dec + assert_eq!(dec, super::decode(enc.as_bytes()).unwrap()); + } + None => { + // the decode needs to be an error + assert!(super::decode(enc.as_bytes()).is_err()); + } + } + } + + #[test] + fn encode_len() { + assert_eq!(super::encode_len(20), 32) + } + + #[test] + fn decode_len() { + assert_eq!(super::decode_len(32), 20) + } +} diff --git a/tvix/nix-compat/src/nixhash/algos.rs b/tvix/nix-compat/src/nixhash/algos.rs new file mode 100644 index 000000000000..d6b0bf47bdb7 --- /dev/null +++ b/tvix/nix-compat/src/nixhash/algos.rs @@ -0,0 +1,39 @@ +use std::fmt::Display; + +use serde::{Deserialize, Serialize}; + +use crate::nixhash::Error; + +/// This are the hash algorithms supported by cppnix. 
+#[derive(Clone, Copy, Debug, Eq, PartialEq, Serialize, Deserialize)] +pub enum HashAlgo { + Md5, + Sha1, + Sha256, + Sha512, +} + +impl Display for HashAlgo { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match &self { + HashAlgo::Md5 => write!(f, "md5"), + HashAlgo::Sha1 => write!(f, "sha1"), + HashAlgo::Sha256 => write!(f, "sha256"), + HashAlgo::Sha512 => write!(f, "sha512"), + } + } +} + +impl TryFrom<&str> for HashAlgo { + type Error = Error; + + fn try_from(algo_str: &str) -> Result<Self, Self::Error> { + match algo_str { + "md5" => Ok(Self::Md5), + "sha1" => Ok(Self::Sha1), + "sha256" => Ok(Self::Sha256), + "sha512" => Ok(Self::Sha512), + _ => Err(Error::InvalidAlgo(algo_str.to_string())), + } + } +} diff --git a/tvix/nix-compat/src/nixhash/mod.rs b/tvix/nix-compat/src/nixhash/mod.rs new file mode 100644 index 000000000000..dccba7a5ba27 --- /dev/null +++ b/tvix/nix-compat/src/nixhash/mod.rs @@ -0,0 +1,454 @@ +use crate::nixbase32; +use data_encoding::{BASE64, BASE64_NOPAD, HEXLOWER}; +use thiserror::Error; + +mod algos; +mod with_mode; + +pub use algos::HashAlgo; +pub use with_mode::NixHashWithMode; + +/// Nix allows specifying hashes in various encodings, and magically just +/// derives the encoding. +#[derive(Clone, Debug, Eq, PartialEq)] +pub struct NixHash { + pub digest: Vec<u8>, + + pub algo: HashAlgo, +} + +impl NixHash { + /// Constructs a new [NixHash] by specifying [HashAlgo] and digest. + pub fn new(algo: HashAlgo, digest: Vec<u8>) -> Self { + Self { algo, digest } + } + + /// Formats a [NixHash] in the Nix default hash format, + /// which is the algo, followed by a colon, then the lower hex encoded digest. + pub fn to_nix_hash_string(&self) -> String { + format!("{}:{}", self.algo, HEXLOWER.encode(&self.digest)) + } +} + +/// Errors related to NixHash construction. +#[derive(Debug, Error)] +pub enum Error { + #[error("invalid hash algo: {0}")] + InvalidAlgo(String), + #[error("invalid SRI string: {0}")] + InvalidSRI(String), + #[error("invalid encoded digest length '{0}' for algo {1}")] + InvalidEncodedDigestLength(usize, HashAlgo), + #[error("invalid base16 encoding: {0}")] + InvalidBase16Encoding(data_encoding::DecodeError), + #[error("invalid base32 encoding: {0}")] + InvalidBase32Encoding(nixbase32::Nixbase32DecodeError), + #[error("invalid base64 encoding: {0}")] + InvalidBase64Encoding(data_encoding::DecodeError), + #[error("conflicting hash algo: {0} (hash_algo) vs {1} (inline)")] + ConflictingHashAlgos(String, String), + #[error("missing inline hash algo, but no externally-specified algo: {0}")] + MissingInlineHashAlgo(String), +} + +/// parses a string to a nix hash. +/// +/// Hashes can be: +/// - Nix hash strings +/// - SRI hashes +/// - bare digests +/// +/// Encoding for Nix hash strings or bare digests can be: +/// - base16 (lowerhex), +/// - nixbase32, +/// - base64 (StdEncoding) +/// - sri string +/// +/// The encoding is derived from the length of the string and the hash type. +/// The hash is communicated out-of-band, but might also be in-band (in the +/// case of a nix hash string or SRI), in which it needs to be consistent with the +/// one communicated out-of-band. 
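+///
+/// For illustration, parsing the SRI form used in the tests below (no
+/// out-of-band algo needed):
+///
+/// ```rust
+/// let h = nix_compat::nixhash::from_str(
+///     "sha256-pc6cFV7Qk5dhRkbJcX/HzZSxAj17drYY1Ank/v1unTk=",
+///     None,
+/// )
+/// .expect("must parse");
+/// assert_eq!(h.algo, nix_compat::nixhash::HashAlgo::Sha256);
+/// ```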
+pub fn from_str(s: &str, algo_str: Option<&str>) -> Result<NixHash, Error> { + // validate algo_str, construct hash_algo + let algo: Option<HashAlgo> = match &algo_str { + Some("sha1") => Some(HashAlgo::Sha1), + Some("sha256") => Some(HashAlgo::Sha256), + Some("sha512") => Some(HashAlgo::Sha512), + Some("md5") => Some(HashAlgo::Md5), + Some(e) => return Err(Error::InvalidAlgo(e.to_string())), + None => None, + }; + + // peek at the beginning of the string. Let's detect the SRI path first. + if s.starts_with("sha1-") + || s.starts_with("sha256-") + || s.starts_with("sha512-") + || s.starts_with("md5-") + { + let parsed_nixhash = from_sri_str(s)?; + // ensure the algo matches with what has been passed externally, if so. + if let Some(algo) = algo { + if algo != parsed_nixhash.algo { + return Err(Error::ConflictingHashAlgos( + algo.to_string(), + parsed_nixhash.algo.to_string(), + )); + } + } + return Ok(parsed_nixhash); + } + + // Now, peek at the beginning again to see if it's a Nix Hash + if s.starts_with("sha1:") + || s.starts_with("sha256:") + || s.starts_with("sha512:") + || s.starts_with("md5:") + { + let parsed_nixhash = from_nix_str(s)?; + // ensure the algo matches with what has been passed externally, if so. + if let Some(algo) = algo { + if algo != parsed_nixhash.algo { + return Err(Error::ConflictingHashAlgos( + algo.to_string(), + parsed_nixhash.algo.to_string(), + )); + } + } + return Ok(parsed_nixhash); + } + + // In all other cases, we assume a bare digest, so there MUST be an externally-passed algo. + match algo { + // Fail if there isn't. + None => Err(Error::MissingInlineHashAlgo(s.to_string())), + Some(algo) => decode_digest(s, algo), + } +} + +/// Parses a Nix hash string ($algo:$digest) to a NixHash. +pub fn from_nix_str(s: &str) -> Result<NixHash, Error> { + if let Some(rest) = s.strip_prefix("sha1:") { + decode_digest(rest, HashAlgo::Sha1) + } else if let Some(rest) = s.strip_prefix("sha256:") { + decode_digest(rest, HashAlgo::Sha256) + } else if let Some(rest) = s.strip_prefix("sha512:") { + decode_digest(rest, HashAlgo::Sha512) + } else if let Some(rest) = s.strip_prefix("md5:") { + decode_digest(rest, HashAlgo::Md5) + } else { + Err(Error::InvalidAlgo(s.to_string())) + } +} + +/// Parses a Nix SRI string to a NixHash. +/// Contrary to the SRI spec, Nix doesn't support SRI strings with multiple hashes, +/// only supports sha256 and sha512 from the spec, and supports sha1 and md5 +/// additionally. +/// It also accepts SRI strings where the base64 has an with invalid padding. +pub fn from_sri_str(s: &str) -> Result<NixHash, Error> { + // try to find the first occurence of "-" + let idx = s.as_bytes().iter().position(|&e| e == b'-'); + + if idx.is_none() { + return Err(Error::InvalidSRI(s.to_string())); + } + + let idx = idx.unwrap(); + + // try to map the part before that `-` to a supported hash algo: + let algo: HashAlgo = s[..idx].try_into()?; + + // the rest should be the digest (as Nix doesn't support more than one hash in an SRI string). + let encoded_digest = &s[idx + 1..]; + let actual_len = encoded_digest.as_bytes().len(); + + // verify the digest length matches what we'd expect from the hash function, + // and then either try decoding as BASE64 or BASE64_NOPAD. 
+ // This will also reject SRI strings with more than one hash, because the length won't match + if actual_len == BASE64.encode_len(hash_algo_length(&algo)) { + let digest: Vec<u8> = BASE64 + .decode(encoded_digest.as_bytes()) + .map_err(Error::InvalidBase64Encoding)?; + Ok(NixHash { digest, algo }) + } else if actual_len == BASE64_NOPAD.encode_len(hash_algo_length(&algo)) { + let digest: Vec<u8> = BASE64_NOPAD + .decode(encoded_digest.as_bytes()) + .map_err(Error::InvalidBase64Encoding)?; + Ok(NixHash { digest, algo }) + } else { + // NOTE: As of now, we reject SRI hashes containing additional + // characters (which upstream Nix seems to simply truncate), as + // there's no occurence of this is in nixpkgs. + // It most likely should also be a bug in Nix. + Err(Error::InvalidEncodedDigestLength( + encoded_digest.as_bytes().len(), + algo, + )) + } +} + +/// decode a plain digest depending on the hash algo specified externally. +fn decode_digest(s: &str, algo: HashAlgo) -> Result<NixHash, Error> { + // for the chosen hash algo, calculate the expected (decoded) digest length + // (as bytes) + let expected_digest_len = hash_algo_length(&algo); + + Ok(NixHash { + digest: match s.len() { + n if n == data_encoding::HEXLOWER.encode_len(expected_digest_len) => { + data_encoding::HEXLOWER + .decode(s.as_ref()) + .map_err(Error::InvalidBase16Encoding) + } + n if n == nixbase32::encode_len(expected_digest_len) => { + nixbase32::decode(s.as_ref()).map_err(Error::InvalidBase32Encoding) + } + n if n == BASE64.encode_len(expected_digest_len) => BASE64 + .decode(s.as_ref()) + .map_err(Error::InvalidBase64Encoding), + _ => return Err(Error::InvalidEncodedDigestLength(s.len(), algo)), + }?, + algo, + }) +} + +// return the number of bytes in the digest of the given hash algo. 
+fn hash_algo_length(hash_algo: &HashAlgo) -> usize { + match hash_algo { + HashAlgo::Sha1 => 20, + HashAlgo::Sha256 => 32, + HashAlgo::Sha512 => 64, + HashAlgo::Md5 => 16, + } +} + +#[cfg(test)] +mod tests { + use crate::{ + nixbase32, + nixhash::{self, HashAlgo, NixHash}, + }; + use test_case::test_case; + const DIGEST_SHA1: &[u8] = &[ + 0x60, 0x16, 0x77, 0x79, 0x97, 0xc3, 0x0a, 0xb0, 0x24, 0x13, 0xcf, 0x50, 0x95, 0x62, 0x2c, + 0xd7, 0x92, 0x42, 0x83, 0xac, + ]; + + const DIGEST_SHA256: &[u8] = &[ + 0xa5, 0xce, 0x9c, 0x15, 0x5e, 0xd0, 0x93, 0x97, 0x61, 0x46, 0x46, 0xc9, 0x71, 0x7f, 0xc7, + 0xcd, 0x94, 0xb1, 0x02, 0x3d, 0x7b, 0x76, 0xb6, 0x18, 0xd4, 0x09, 0xe4, 0xfe, 0xfd, 0x6e, + 0x9d, 0x39, + ]; + + const DIGEST_SHA512: &[u8] = &[ + 0xab, 0x40, 0xd0, 0xbe, 0x35, 0x41, 0xf0, 0x77, 0x4b, 0xba, 0x78, 0x15, 0xd1, 0x3d, 0x10, + 0xb0, 0x32, 0x52, 0xe9, 0x6e, 0x95, 0xf7, 0xdb, 0xb4, 0xee, 0x99, 0xa3, 0xb4, 0x31, 0xc2, + 0x16, 0x62, 0xfd, 0x69, 0x71, 0xa0, 0x20, 0x16, 0x0e, 0x39, 0x84, 0x8a, 0xa5, 0xf3, 0x05, + 0xb9, 0xbe, 0x0f, 0x78, 0x72, 0x7b, 0x2b, 0x07, 0x89, 0xe3, 0x9f, 0x12, 0x4d, 0x21, 0xe9, + 0x2b, 0x8f, 0x39, 0xef, + ]; + const DIGEST_MD5: &[u8] = &[ + 0xc4, 0x87, 0x4a, 0x88, 0x97, 0x44, 0x0b, 0x39, 0x3d, 0x86, 0x2d, 0x8f, 0xd4, 0x59, 0x07, + 0x3f, + ]; + + fn to_base16(digest: &[u8]) -> String { + data_encoding::HEXLOWER.encode(digest) + } + + fn to_nixbase32(digest: &[u8]) -> String { + nixbase32::encode(digest) + } + + fn to_base64(digest: &[u8]) -> String { + data_encoding::BASE64.encode(digest) + } + + fn to_base64_nopad(digest: &[u8]) -> String { + data_encoding::BASE64_NOPAD.encode(digest) + } + + // TODO + fn make_nixhash(algo: &HashAlgo, digest_encoded: String) -> String { + format!("{}:{}", algo, digest_encoded) + } + fn make_sri_string(algo: &HashAlgo, digest_encoded: String) -> String { + format!("{}-{}", algo, digest_encoded) + } + + /// Test parsing a hash string in various formats, and also when/how the out-of-band algo is needed. + #[test_case(DIGEST_SHA1, HashAlgo::Sha1; "sha1")] + #[test_case(DIGEST_SHA256, HashAlgo::Sha256; "sha256")] + #[test_case(DIGEST_SHA512, HashAlgo::Sha512; "sha512")] + #[test_case(DIGEST_MD5, HashAlgo::Md5; "md5")] + fn from_str(digest: &[u8], algo: HashAlgo) { + let expected_hash = NixHash { + digest: digest.to_vec(), + algo: algo.clone(), + }; + // parse SRI + { + // base64 without out-of-band algo + let s = make_sri_string(&algo, to_base64(digest)); + let h = nixhash::from_str(&s, None).expect("must succeed"); + assert_eq!(expected_hash, h); + + // base64 with out-of-band-algo + let s = make_sri_string(&algo, to_base64(digest)); + let h = nixhash::from_str(&s, Some(&algo.to_string())).expect("must succeed"); + assert_eq!(expected_hash, h); + + // base64_nopad without out-of-band algo + let s = make_sri_string(&algo, to_base64_nopad(digest)); + let h = nixhash::from_str(&s, None).expect("must succeed"); + assert_eq!(expected_hash, h); + + // base64_nopad with out-of-band-algo + let s = make_sri_string(&algo, to_base64_nopad(digest)); + let h = nixhash::from_str(&s, Some(&algo.to_string())).expect("must succeed"); + assert_eq!(expected_hash, h); + } + + // parse plain base16. should succeed with algo out-of-band, but fail without. + { + let s = to_base16(digest); + nixhash::from_str(&s, None).expect_err("must fail"); + let h = nixhash::from_str(&s, Some(&algo.to_string())).expect("must succeed"); + assert_eq!(expected_hash, h); + } + + // parse plain nixbase32. should succeed with algo out-of-band, but fail without. 
+ { + let s = to_nixbase32(digest); + nixhash::from_str(&s, None).expect_err("must fail"); + let h = nixhash::from_str(&s, Some(&algo.to_string())).expect("must succeed"); + assert_eq!(expected_hash, h); + } + + // parse plain base64. should succeed with algo out-of-band, but fail without. + { + let s = to_base64(digest); + nixhash::from_str(&s, None).expect_err("must fail"); + let h = nixhash::from_str(&s, Some(&algo.to_string())).expect("must succeed"); + assert_eq!(expected_hash, h); + } + + // parse Nix hash strings + { + // base16. should succeed with both algo out-of-band and in-band. + { + let s = make_nixhash(&algo, to_base16(digest)); + assert_eq!( + expected_hash, + nixhash::from_str(&s, None).expect("must succeed") + ); + assert_eq!( + expected_hash, + nixhash::from_str(&s, Some(&algo.to_string())).expect("must succeed") + ); + } + // nixbase32. should succeed with both algo out-of-band and in-band. + { + let s = make_nixhash(&algo, to_nixbase32(digest)); + assert_eq!( + expected_hash, + nixhash::from_str(&s, None).expect("must succeed") + ); + assert_eq!( + expected_hash, + nixhash::from_str(&s, Some(&algo.to_string())).expect("must succeed") + ); + } + // base64. should succeed with both algo out-of-band and in-band. + { + let s = make_nixhash(&algo, to_base64(digest)); + assert_eq!( + expected_hash, + nixhash::from_str(&s, None).expect("must succeed") + ); + assert_eq!( + expected_hash, + nixhash::from_str(&s, Some(&algo.to_string())).expect("must succeed") + ); + } + } + } + + /// Test parsing an SRI hash via the [nixhash::from_sri_str] method. + #[test] + fn from_sri_str() { + let nix_hash = nixhash::from_sri_str("sha256-pc6cFV7Qk5dhRkbJcX/HzZSxAj17drYY1Ank/v1unTk=") + .expect("must succeed"); + + assert_eq!(HashAlgo::Sha256, nix_hash.algo); + assert_eq!( + vec![ + 0xa5, 0xce, 0x9c, 0x15, 0x5e, 0xd0, 0x93, 0x97, 0x61, 0x46, 0x46, 0xc9, 0x71, 0x7f, + 0xc7, 0xcd, 0x94, 0xb1, 0x02, 0x3d, 0x7b, 0x76, 0xb6, 0x18, 0xd4, 0x09, 0xe4, 0xfe, + 0xfd, 0x6e, 0x9d, 0x39 + ], + nix_hash.digest + ) + } + + /// Ensure we detect truncated base64 digests, where the digest size + /// doesn't match what's expected from that hash function. + #[test] + fn from_sri_str_truncated() { + nixhash::from_sri_str("sha256-pc6cFV7Qk5dhRkbJcX/HzZSxAj17drYY1Ank") + .expect_err("must fail"); + } + + /// Ensure we fail on SRI hashes that Nix doesn't support. + #[test] + fn from_sri_str_unsupported() { + nixhash::from_sri_str( + "sha384-o4UVSl89mIB0sFUK+3jQbG+C9Zc9dRlV/Xd3KAvXEbhqxu0J5OAdg6b6VHKHwQ7U", + ) + .expect_err("must fail"); + } + + /// Ensure we reject invalid base64 encoding + #[test] + fn from_sri_str_invalid_base64() { + nixhash::from_sri_str("sha256-invalid=base64").expect_err("must fail"); + } + + /// Ensure we reject SRI strings with multiple hashes, as Nix doesn't support that. + #[test] + fn from_sri_str_unsupported_multiple() { + nixhash::from_sri_str("sha256-ngth6szLtC1IJIYyz3lhftzL8SkrJkqPyPve+dGqa1Y= sha512-q0DQvjVB8HdLungV0T0QsDJS6W6V99u07pmjtDHCFmL9aXGgIBYOOYSKpfMFub4PeHJ7KweJ458STSHpK4857w==").expect_err("must fail"); + } + + /// Nix also accepts SRI strings with missing padding, but only in case the + /// string is expressed as SRI, so it still needs to have a `sha256-` prefix. + /// + /// This both seems to work if it is passed with and without specifying the + /// hash algo out-of-band (hash = "sha256-…" or sha256 = "sha256-…") + /// + /// Passing the same broken base64 string, but not as SRI, while passing + /// the hash algo out-of-band does not work. 
+ #[test] + fn sha256_broken_padding() { + let broken_base64 = "fgIr3TyFGDAXP5+qoAaiMKDg/a1MlT6Fv/S/DaA24S8"; + // if padded with a trailing '=' + let expected_digest = vec![ + 0x7e, 0x02, 0x2b, 0xdd, 0x3c, 0x85, 0x18, 0x30, 0x17, 0x3f, 0x9f, 0xaa, 0xa0, 0x06, + 0xa2, 0x30, 0xa0, 0xe0, 0xfd, 0xad, 0x4c, 0x95, 0x3e, 0x85, 0xbf, 0xf4, 0xbf, 0x0d, + 0xa0, 0x36, 0xe1, 0x2f, + ]; + + // passing hash algo out of band should succeed + let nix_hash = nixhash::from_str(&format!("sha256-{}", &broken_base64), Some("sha256")) + .expect("must succeed"); + assert_eq!(&expected_digest, &nix_hash.digest); + + // not passing hash algo out of band should succeed + let nix_hash = + nixhash::from_str(&format!("sha256-{}", &broken_base64), None).expect("must succeed"); + assert_eq!(&expected_digest, &nix_hash.digest); + + // not passing SRI, but hash algo out of band should fail + nixhash::from_str(broken_base64, Some("sha256")).expect_err("must fail"); + } +} diff --git a/tvix/nix-compat/src/nixhash/with_mode.rs b/tvix/nix-compat/src/nixhash/with_mode.rs new file mode 100644 index 000000000000..1908f27b4759 --- /dev/null +++ b/tvix/nix-compat/src/nixhash/with_mode.rs @@ -0,0 +1,136 @@ +use crate::nixbase32; +use crate::nixhash::{HashAlgo, NixHash}; +use serde::ser::SerializeMap; +use serde::{Deserialize, Deserializer, Serialize, Serializer}; + +pub enum NixHashMode { + Flat, + Recursive, +} + +impl NixHashMode { + pub fn prefix(self) -> &'static str { + match self { + Self::Flat => "", + Self::Recursive => "r:", + } + } +} + +/// A Nix Hash can either be flat or recursive. +#[derive(Clone, Debug, Eq, PartialEq)] +pub enum NixHashWithMode { + Flat(NixHash), + Recursive(NixHash), +} + +impl NixHashWithMode { + pub fn mode(&self) -> NixHashMode { + match self { + Self::Flat(_) => NixHashMode::Flat, + Self::Recursive(_) => NixHashMode::Recursive, + } + } + + pub fn digest(&self) -> &NixHash { + match self { + Self::Flat(ref h) => h, + Self::Recursive(ref h) => h, + } + } + + /// Formats a [NixHashWithMode] in the Nix default hash format, + /// which is the algo, followed by a colon, then the lower hex encoded digest. + /// In case the hash itself is recursive, a `r:` is added as prefix + pub fn to_nix_hash_string(&self) -> String { + String::from(self.mode().prefix()) + &self.digest().to_nix_hash_string() + } +} + +impl Serialize for NixHashWithMode { + /// map a NixHashWithMode into the serde data model. + fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> + where + S: Serializer, + { + let mut map = serializer.serialize_map(Some(2))?; + match self { + NixHashWithMode::Flat(h) => { + map.serialize_entry("hash", &nixbase32::encode(&h.digest))?; + map.serialize_entry("hashAlgo", &h.algo.to_string())?; + } + NixHashWithMode::Recursive(h) => { + map.serialize_entry("hash", &nixbase32::encode(&h.digest))?; + map.serialize_entry("hashAlgo", &format!("r:{}", &h.algo.to_string()))?; + } + }; + map.end() + } +} + +impl<'de> Deserialize<'de> for NixHashWithMode { + /// map the serde data model into a NixHashWithMode. + /// + /// The serde data model has a `hash` field (containing a digest in nixbase32), + /// and a `hashAlgo` field, containing the stringified hash algo. + /// In case the hash is recursive, hashAlgo also has a `r:` prefix. + /// + /// This is to match how `nix show-derivation` command shows them in JSON + /// representation. + fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> + where + D: Deserializer<'de>, + { + // TODO: don't use serde_json here? 
+ // TODO: serde seems to simply set `hash_with_mode` to None if hash + // and hashAlgo fail, but that should be a proper deserialization error + // that should be propagated to the user! + + let json = serde_json::Value::deserialize(deserializer)?; + match json.as_object() { + None => Err(serde::de::Error::custom("couldn't parse as map"))?, + Some(map) => { + let digest: Vec<u8> = { + if let Some(v) = map.get("hash") { + if let Some(s) = v.as_str() { + data_encoding::HEXLOWER + .decode(s.as_bytes()) + .map_err(|e| serde::de::Error::custom(e.to_string()))? + } else { + return Err(serde::de::Error::custom( + "couldn't parse 'hash' as string", + )); + } + } else { + return Err(serde::de::Error::custom("couldn't extract 'hash' key")); + } + }; + + if let Some(v) = map.get("hashAlgo") { + if let Some(s) = v.as_str() { + match s.strip_prefix("r:") { + Some(rest) => Ok(NixHashWithMode::Recursive(NixHash::new( + HashAlgo::try_from(rest).map_err(|e| { + serde::de::Error::custom(format!("unable to parse algo: {}", e)) + })?, + digest, + ))), + None => Ok(NixHashWithMode::Flat(NixHash::new( + HashAlgo::try_from(s).map_err(|e| { + serde::de::Error::custom(format!("unable to parse algo: {}", e)) + })?, + digest, + ))), + } + } else { + Err(serde::de::Error::custom( + "couldn't parse 'hashAlgo' as string", + )) + } + } else { + Err(serde::de::Error::custom("couldn't extract 'hashAlgo' key")) + } + } + } + } +} diff --git a/tvix/nix-compat/src/store_path/mod.rs b/tvix/nix-compat/src/store_path/mod.rs new file mode 100644 index 000000000000..a67132cb8e30 --- /dev/null +++ b/tvix/nix-compat/src/store_path/mod.rs @@ -0,0 +1,294 @@ +use crate::nixbase32::{self, Nixbase32DecodeError}; +use std::{fmt, path::PathBuf}; +use thiserror::Error; + +mod utils; + +pub use utils::*; + +pub const DIGEST_SIZE: usize = 20; +// lazy_static doesn't allow us to call NIXBASE32.encode_len(), so we ran it +// manually and have an assert in the tests. +pub const ENCODED_DIGEST_SIZE: usize = 32; + +// The store dir prefix, without trailing slash. +// That's usually where the Nix store is mounted at. +pub const STORE_DIR: &str = "/nix/store"; +pub const STORE_DIR_WITH_SLASH: &str = "/nix/store/"; + +/// Errors that can occur when parsing a literal store path +#[derive(Debug, PartialEq, Eq, Error)] +pub enum Error { + #[error("Dash is missing between hash and name")] + MissingDash(), + #[error("Hash encoding is invalid: {0}")] + InvalidHashEncoding(Nixbase32DecodeError), + #[error("{0}")] + InvalidName(NameError), + #[error("Tried to parse an absolute path which was missing the store dir prefix.")] + MissingStoreDir(), +} + +/// Errors that can occur during the validation of name characters. +#[derive(Debug, PartialEq, Eq, Error)] +pub enum NameError { + #[error("Invalid name: {0}")] + InvalidName(String), +} + +impl From<NameError> for Error { + fn from(e: NameError) -> Self { + Self::InvalidName(e) + } +} + +/// Represents a path in the Nix store (a direct child of [STORE_DIR]). +/// +/// It consists of a digest (20 bytes), and a name, which is a string. +/// The name may only contain ASCII alphanumeric, or one of the following +/// characters: `-`, `_`, `.`, `+`, `?`, `=`. +/// The name is usually used to describe the pname and version of a package. +/// Derivation paths can also be represented as store paths, their names just +/// end with the `.drv` prefix. +/// +/// A [StorePath] does not encode any additional subpath "inside" the store +/// path. 
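+///
+/// For example, in
+/// `/nix/store/00bgd045z0d4icpbc2yyz4gx48ak44la-net-tools-1.60_p20170221182432`
+/// the digest is the nixbase32-encoded `00bgd045z0d4icpbc2yyz4gx48ak44la` and
+/// the name is `net-tools-1.60_p20170221182432`.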
+#[derive(Clone, Debug, PartialEq, Eq)] +pub struct StorePath { + pub digest: [u8; DIGEST_SIZE], + pub name: String, +} + +impl StorePath { + /// Construct a [StorePath] by passing the `$digest-$name` string + /// that comes after [STORE_DIR_WITH_SLASH]. + pub fn from_string(s: &str) -> Result<StorePath, Error> { + // the whole string needs to be at least: + // + // - 32 characters (encoded hash) + // - 1 dash + // - 1 character for the name + if s.len() < ENCODED_DIGEST_SIZE + 2 { + Err(NameError::InvalidName("".to_string()))?; + } + + let digest = match nixbase32::decode(s[..ENCODED_DIGEST_SIZE].as_bytes()) { + Ok(decoded) => decoded, + Err(decoder_error) => return Err(Error::InvalidHashEncoding(decoder_error)), + }; + + if s.as_bytes()[ENCODED_DIGEST_SIZE] != b'-' { + return Err(Error::MissingDash()); + } + + StorePath::validate_name(&s[ENCODED_DIGEST_SIZE + 2..])?; + + Ok(StorePath { + name: s[ENCODED_DIGEST_SIZE + 1..].to_string(), + digest: digest.try_into().expect("size is known"), + }) + } + + /// Construct a [StorePath] from an absolute store path string. + /// This is equivalent to calling [StorePath::from_string], but stripping + /// the [STORE_DIR_WITH_SLASH] prefix before. + pub fn from_absolute_path(s: &str) -> Result<StorePath, Error> { + match s.strip_prefix(STORE_DIR_WITH_SLASH) { + Some(s_stripped) => Self::from_string(s_stripped), + None => Err(Error::MissingStoreDir()), + } + } + + /// Decompose a string into a [StorePath] and a [PathBuf] containing the + /// rest of the path, or an error. + pub fn from_absolute_path_full(s: &str) -> Result<(StorePath, PathBuf), Error> { + // strip [STORE_DIR_WITH_SLASH] from s + match s.strip_prefix(STORE_DIR_WITH_SLASH) { + None => Err(Error::MissingStoreDir()), + Some(rest) => { + // put rest in a PathBuf + let mut p = PathBuf::new(); + p.push(rest); + + let mut it = p.components(); + + // The first component of the rest must be parse-able as a [StorePath] + if let Some(s) = it.next() { + // convert first component to string + if let Some(s) = s.as_os_str().to_str() { + let store_path = StorePath::from_string(s)?; + let rest_buf: PathBuf = it.collect(); + Ok((store_path, rest_buf)) + } else { + Err(Error::InvalidName(NameError::InvalidName("".to_string()))) + } + } else { + Err(Error::InvalidName(NameError::InvalidName("".to_string()))) + } + } + } + } + + /// Converts the [StorePath] to an absolute store path string. + /// That is just the string representation, prefixed with the store prefix + /// ([STORE_DIR_WITH_SLASH]), + pub fn to_absolute_path(&self) -> String { + format!("{}{}", STORE_DIR_WITH_SLASH, self) + } + + /// Checks a given &str to match the restrictions for store path names. + pub fn validate_name(s: &str) -> Result<(), NameError> { + for c in s.chars() { + if c.is_ascii_alphanumeric() + || c == '-' + || c == '_' + || c == '.' + || c == '+' + || c == '?' + || c == '=' + { + continue; + } + + return Err(NameError::InvalidName(s.to_string())); + } + + Ok(()) + } +} + +impl fmt::Display for StorePath { + /// The string representation of a store path starts with a digest (20 + /// bytes), [crate::nixbase32]-encoded, followed by a `-`, + /// and ends with the name. 
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}-{}", nixbase32::encode(&self.digest), self.name) + } +} + +#[cfg(test)] +mod tests { + use std::path::PathBuf; + + use crate::nixbase32; + use crate::store_path::{DIGEST_SIZE, ENCODED_DIGEST_SIZE}; + use test_case::test_case; + + use super::{Error, NameError, StorePath}; + + #[test] + fn encoded_digest_size() { + assert_eq!(ENCODED_DIGEST_SIZE, nixbase32::encode_len(DIGEST_SIZE)); + } + + #[test] + fn happy_path() { + let example_nix_path_str = + "00bgd045z0d4icpbc2yyz4gx48ak44la-net-tools-1.60_p20170221182432"; + let nixpath = + StorePath::from_string(example_nix_path_str).expect("Error parsing example string"); + + let expected_digest: [u8; DIGEST_SIZE] = [ + 0x8a, 0x12, 0x32, 0x15, 0x22, 0xfd, 0x91, 0xef, 0xbd, 0x60, 0xeb, 0xb2, 0x48, 0x1a, + 0xf8, 0x85, 0x80, 0xf6, 0x16, 0x00, + ]; + + assert_eq!("net-tools-1.60_p20170221182432", nixpath.name); + assert_eq!(nixpath.digest, expected_digest); + + assert_eq!(example_nix_path_str, nixpath.to_string()) + } + + #[test] + fn invalid_hash_length() { + StorePath::from_string("00bgd045z0d4icpbc2yy-net-tools-1.60_p20170221182432") + .expect_err("must fail"); + } + + #[test] + fn invalid_encoding_hash() { + StorePath::from_string("00bgd045z0d4icpbc2yyz4gx48aku4la-net-tools-1.60_p20170221182432") + .expect_err("must fail"); + } + + #[test] + fn more_than_just_the_bare_nix_store_path() { + StorePath::from_string( + "00bgd045z0d4icpbc2yyz4gx48aku4la-net-tools-1.60_p20170221182432/bin/arp", + ) + .expect_err("must fail"); + } + + #[test] + fn no_dash_between_hash_and_name() { + StorePath::from_string("00bgd045z0d4icpbc2yyz4gx48ak44lanet-tools-1.60_p20170221182432") + .expect_err("must fail"); + } + + #[test] + fn absolute_path() { + let example_nix_path_str = + "00bgd045z0d4icpbc2yyz4gx48ak44la-net-tools-1.60_p20170221182432"; + let nixpath_expected = StorePath::from_string(example_nix_path_str).expect("must parse"); + + let nixpath_actual = StorePath::from_absolute_path( + "/nix/store/00bgd045z0d4icpbc2yyz4gx48ak44la-net-tools-1.60_p20170221182432", + ) + .expect("must parse"); + + assert_eq!(nixpath_expected, nixpath_actual); + + assert_eq!( + "/nix/store/00bgd045z0d4icpbc2yyz4gx48ak44la-net-tools-1.60_p20170221182432", + nixpath_actual.to_absolute_path(), + ); + } + + #[test] + fn absolute_path_missing_prefix() { + assert_eq!( + Error::MissingStoreDir(), + StorePath::from_absolute_path("foobar-123").expect_err("must fail") + ); + } + + #[test_case( + "/nix/store/00bgd045z0d4icpbc2yyz4gx48ak44la-net-tools-1.60_p20170221182432", + (StorePath::from_string("00bgd045z0d4icpbc2yyz4gx48ak44la-net-tools-1.60_p20170221182432").unwrap(), PathBuf::new()) + ; "without prefix")] + #[test_case( + "/nix/store/00bgd045z0d4icpbc2yyz4gx48ak44la-net-tools-1.60_p20170221182432/", + (StorePath::from_string("00bgd045z0d4icpbc2yyz4gx48ak44la-net-tools-1.60_p20170221182432").unwrap(), PathBuf::new()) + ; "without prefix, but trailing slash")] + #[test_case( + "/nix/store/00bgd045z0d4icpbc2yyz4gx48ak44la-net-tools-1.60_p20170221182432/bin/arp", + (StorePath::from_string("00bgd045z0d4icpbc2yyz4gx48ak44la-net-tools-1.60_p20170221182432").unwrap(), PathBuf::from("bin/arp")) + ; "with prefix")] + #[test_case( + "/nix/store/00bgd045z0d4icpbc2yyz4gx48ak44la-net-tools-1.60_p20170221182432/bin/arp/", + (StorePath::from_string("00bgd045z0d4icpbc2yyz4gx48ak44la-net-tools-1.60_p20170221182432").unwrap(), PathBuf::from("bin/arp/")) + ; "with prefix and trailing slash")] + fn from_absolute_path_full(s: &str, 
expected: (StorePath, PathBuf)) { + let actual = StorePath::from_absolute_path_full(s).expect("must succeed"); + assert_eq!(expected, actual); + } + + #[test] + fn from_absolute_path_errors() { + assert_eq!( + Error::InvalidName(NameError::InvalidName("".to_string())), + StorePath::from_absolute_path_full("/nix/store/").expect_err("must fail") + ); + assert_eq!( + Error::InvalidName(NameError::InvalidName("".to_string())), + StorePath::from_absolute_path_full("/nix/store/foo").expect_err("must fail") + ); + assert_eq!( + Error::MissingStoreDir(), + StorePath::from_absolute_path_full( + "00bgd045z0d4icpbc2yyz4gx48ak44la-net-tools-1.60_p20170221182432" + ) + .expect_err("must fail") + ); + } +} diff --git a/tvix/nix-compat/src/store_path/utils.rs b/tvix/nix-compat/src/store_path/utils.rs new file mode 100644 index 000000000000..fd3785568bac --- /dev/null +++ b/tvix/nix-compat/src/store_path/utils.rs @@ -0,0 +1,289 @@ +use crate::nixbase32; +use crate::nixhash::{HashAlgo, NixHash, NixHashWithMode}; +use crate::store_path::StorePath; +use sha2::{Digest, Sha256}; +use thiserror::Error; + +use super::{NameError, STORE_DIR}; + +/// Errors that can occur when creating a content-addressed store path. +/// +/// This wraps the main [Error] which is just about invalid store path names. +#[derive(Debug, PartialEq, Eq, Error)] +pub enum BuildStorePathError { + #[error("{0}")] + InvalidName(NameError), + /// This error occurs when we have references outside the SHA-256 + + /// Recursive case. The restriction comes from upstream Nix. It may be + /// lifted at some point but there isn't a pressing need to anticipate that. + #[error("References were not supported as much as requested")] + InvalidReference(), +} + +/// compress_hash takes an arbitrarily long sequence of bytes (usually +/// a hash digest), and returns a sequence of bytes of length +/// OUTPUT_SIZE. +/// +/// It's calculated by rotating through the bytes in the output buffer +/// (zero- initialized), and XOR'ing with each byte of the passed +/// input. It consumes 1 byte at a time, and XOR's it with the current +/// value in the output buffer. +/// +/// This mimics equivalent functionality in C++ Nix. +pub fn compress_hash<const OUTPUT_SIZE: usize>(input: &[u8]) -> [u8; OUTPUT_SIZE] { + let mut output = [0; OUTPUT_SIZE]; + + for (ii, ch) in input.iter().enumerate() { + output[ii % OUTPUT_SIZE] ^= ch; + } + + output +} + +/// This builds a store path, by calculating the text_hash_string of either a +/// derivation or a literal text file that may contain references. +pub fn build_text_path<S: AsRef<str>, I: IntoIterator<Item = S>, C: AsRef<[u8]>>( + name: &str, + content: C, + references: I, +) -> Result<StorePath, NameError> { + build_store_path_from_fingerprint_parts( + &make_type("text", references, false), + // the nix_hash_string representation of the sha256 digest of some contents + &{ + let content_digest = { + let hasher = Sha256::new_with_prefix(content); + hasher.finalize() + }; + NixHash::new(crate::nixhash::HashAlgo::Sha256, content_digest.to_vec()) + }, + name, + ) +} + +/// This builds a more "regular" content-addressed store path +pub fn build_regular_ca_path<S: AsRef<str>, I: IntoIterator<Item = S>>( + name: &str, + hash_with_mode: &NixHashWithMode, + references: I, + self_reference: bool, +) -> Result<StorePath, BuildStorePathError> { + match &hash_with_mode { + NixHashWithMode::Recursive( + ref hash @ NixHash { + algo: HashAlgo::Sha256, + .. 
+ }, + ) => build_store_path_from_fingerprint_parts( + &make_type("source", references, self_reference), + hash, + name, + ) + .map_err(BuildStorePathError::InvalidName), + _ => { + if references.into_iter().next().is_some() { + return Err(BuildStorePathError::InvalidReference()); + } + if self_reference { + return Err(BuildStorePathError::InvalidReference()); + } + build_store_path_from_fingerprint_parts( + "output:out", + &{ + let content_digest = { + let mut hasher = Sha256::new_with_prefix("fixed:out:"); + hasher.update(hash_with_mode.mode().prefix()); + hasher.update(hash_with_mode.digest().algo.to_string()); + hasher.update(":"); + hasher.update( + &data_encoding::HEXLOWER.encode(&hash_with_mode.digest().digest), + ); + hasher.update(":"); + hasher.finalize() + }; + NixHash::new(crate::nixhash::HashAlgo::Sha256, content_digest.to_vec()) + }, + name, + ) + .map_err(BuildStorePathError::InvalidName) + } + } +} + +/// This builds an input-addressed store path +/// +/// Input-addresed store paths are always derivation outputs, the "input" in question is the +/// derivation and its closure. +pub fn build_output_path( + drv_hash: &NixHash, + output_name: &str, + output_path_name: &str, +) -> Result<StorePath, NameError> { + build_store_path_from_fingerprint_parts( + &(String::from("output:") + output_name), + drv_hash, + output_path_name, + ) +} + +/// This builds a store path from fingerprint parts. +/// Usually, that function is used from [build_text_path] and +/// passed a "text hash string" (starting with "text:" as fingerprint), +/// but other fingerprints starting with "output:" are also used in Derivation +/// output path calculation. +/// +/// The fingerprint is hashed with sha256, its digest is compressed to 20 bytes, +/// and nixbase32-encoded (32 characters). +fn build_store_path_from_fingerprint_parts( + ty: &str, + hash: &NixHash, + name: &str, +) -> Result<StorePath, NameError> { + let fingerprint = + String::from(ty) + ":" + &hash.to_nix_hash_string() + ":" + STORE_DIR + ":" + name; + let digest = { + let hasher = Sha256::new_with_prefix(fingerprint); + hasher.finalize() + }; + let compressed = compress_hash::<20>(&digest); + StorePath::validate_name(name)?; + Ok(StorePath { + digest: compressed, + name: name.to_string(), + }) +} + +/// This contains the Nix logic to create "text hash strings", which are used +/// in `builtins.toFile`, as well as in Derivation Path calculation. +/// +/// A text hash is calculated by concatenating the following fields, separated by a `:`: +/// +/// - text +/// - references, individually joined by `:` +/// - the nix_hash_string representation of the sha256 digest of some contents +/// - the value of `storeDir` +/// - the name +fn make_type<S: AsRef<str>, I: IntoIterator<Item = S>>( + ty: &str, + references: I, + self_ref: bool, +) -> String { + let mut s = String::from(ty); + + for reference in references { + s.push(':'); + s.push_str(reference.as_ref()); + } + + if self_ref { + s.push_str(":self"); + } + + s +} + +/// Nix placeholders (i.e. values returned by `builtins.placeholder`) +/// are used to populate outputs with paths that must be +/// string-replaced with the actual placeholders later, at runtime. +/// +/// The actual placeholder is basically just a SHA256 hash encoded in +/// cppnix format. 
+pub fn hash_placeholder(name: &str) -> String { + let digest = { + let mut hasher = Sha256::new(); + hasher.update(format!("nix-output:{}", name)); + hasher.finalize() + }; + + format!("/{}", nixbase32::encode(&digest)) +} + +#[cfg(test)] +mod test { + use super::*; + use crate::nixhash::{NixHash, NixHashWithMode}; + + #[test] + fn build_text_path_with_zero_references() { + // This hash should match `builtins.toFile`, e.g.: + // + // nix-repl> builtins.toFile "foo" "bar" + // "/nix/store/vxjiwkjkn7x4079qvh1jkl5pn05j2aw0-foo" + + let store_path = build_text_path("foo", "bar", Vec::<String>::new()) + .expect("build_store_path() should succeed"); + + assert_eq!( + store_path.to_absolute_path().as_str(), + "/nix/store/vxjiwkjkn7x4079qvh1jkl5pn05j2aw0-foo" + ); + } + + #[test] + fn build_text_path_with_non_zero_references() { + // This hash should match: + // + // nix-repl> builtins.toFile "baz" "${builtins.toFile "foo" "bar"}" + // "/nix/store/5xd714cbfnkz02h2vbsj4fm03x3f15nf-baz" + + let inner = build_text_path("foo", "bar", Vec::<String>::new()) + .expect("path_with_references() should succeed"); + let inner_path = inner.to_absolute_path(); + + let outer = build_text_path("baz", &inner_path, vec![inner_path.as_str()]) + .expect("path_with_references() should succeed"); + + assert_eq!( + outer.to_absolute_path().as_str(), + "/nix/store/5xd714cbfnkz02h2vbsj4fm03x3f15nf-baz" + ); + } + + #[test] + fn build_sha1_path() { + let outer = build_regular_ca_path( + "bar", + &NixHashWithMode::Recursive(NixHash { + algo: HashAlgo::Sha1, + digest: data_encoding::HEXLOWER + .decode(b"0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33") + .expect("hex should decode"), + }), + Vec::<String>::new(), + false, + ) + .expect("path_with_references() should succeed"); + + assert_eq!( + outer.to_absolute_path().as_str(), + "/nix/store/mp57d33657rf34lzvlbpfa1gjfv5gmpg-bar" + ); + } + + #[test] + fn build_store_path_with_non_zero_references() { + // This hash should match: + // + // nix-repl> builtins.toFile "baz" "${builtins.toFile "foo" "bar"}" + // "/nix/store/5xd714cbfnkz02h2vbsj4fm03x3f15nf-baz" + // + // $ nix store make-content-addressed /nix/store/5xd714cbfnkz02h2vbsj4fm03x3f15nf-baz + // rewrote '/nix/store/5xd714cbfnkz02h2vbsj4fm03x3f15nf-baz' to '/nix/store/s89y431zzhmdn3k8r96rvakryddkpv2v-baz' + let outer = build_regular_ca_path( + "baz", + &NixHashWithMode::Recursive(NixHash { + algo: HashAlgo::Sha256, + digest: nixbase32::decode(b"1xqkzcb3909fp07qngljr4wcdnrh1gdam1m2n29i6hhrxlmkgkv1") + .expect("hex should decode"), + }), + vec!["/nix/store/dxwkwjzdaq7ka55pkk252gh32bgpmql4-foo"], + false, + ) + .expect("path_with_references() should succeed"); + + assert_eq!( + outer.to_absolute_path().as_str(), + "/nix/store/s89y431zzhmdn3k8r96rvakryddkpv2v-baz" + ); + } +} diff --git a/tvix/nix_cli/Cargo.toml b/tvix/nix_cli/Cargo.toml new file mode 100644 index 000000000000..f9de2b97706b --- /dev/null +++ b/tvix/nix_cli/Cargo.toml @@ -0,0 +1,18 @@ +[package] +name = "nix-cli" +version = "0.1.0" +edition = "2021" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies.clap] +version = "4.0" + +[dependencies.tempfile] +version = "3.2.0" + +[[bin]] +name = "nix-store" + +[features] +integration_tests = [] diff --git a/tvix/nix_cli/default.nix b/tvix/nix_cli/default.nix new file mode 100644 index 000000000000..4c79e46cabfa --- /dev/null +++ b/tvix/nix_cli/default.nix @@ -0,0 +1,5 @@ +{ depot, ... 
}: + +depot.tvix.crates.workspaceMembers.nix-cli.build.override { + runTests = true; +} diff --git a/tvix/nix_cli/src/bin/nix-store.rs b/tvix/nix_cli/src/bin/nix-store.rs new file mode 100644 index 000000000000..c6f7d00a4df8 --- /dev/null +++ b/tvix/nix_cli/src/bin/nix-store.rs @@ -0,0 +1,105 @@ +fn main() { + main_args(std::env::args().collect()).unwrap_or_else(|e| e.exit()); +} + +pub fn main_args(args: Vec<String>) -> clap::error::Result<NixResult> { + let matches = clap::Command::new("nix-store") + .subcommand(clap::Command::new("--add").arg(clap::Arg::new("FILE").required(true).index(1))) + .try_get_matches_from(args.iter())?; + if let Some(add) = matches.subcommand_matches("--add") { + let file = add.get_one::<String>("FILE").expect("--add needs a file"); + let file_contents = + std::fs::read_to_string(file).expect(&format!("file {} does not exist", file)); + Ok(NixResult::FileAddedToStore { + content: file_contents, + }) + } else { + panic!("read some arguments that we do not know: {:?}", args) + } +} + +#[derive(Debug, Eq, PartialEq)] +pub enum NixResult { + FileAddedToStore { content: String }, +} + +#[cfg(test)] +mod integration_tests { + use std::collections::VecDeque; + use std::io::Write; + + use super::*; + + #[derive(Debug)] + enum NixOutput { + Err { + status: i32, + stdout: String, + stderr: String, + }, + Ok { + stdout: String, + stderr: String, + }, + } + + fn run_nix_command(cmd: &str, args: Vec<String>) -> NixOutput { + let out = std::process::Command::new(cmd) + .args(args) + .output() + .expect(&format!("could not run {}", cmd)); + match out.status.code().expect("no status code!") { + 0 => NixOutput::Ok { + stdout: String::from_utf8_lossy(&out.stdout).trim_end().to_string(), + stderr: String::from_utf8_lossy(&out.stderr).trim_end().to_string(), + }, + status => NixOutput::Err { + status, + stdout: String::from_utf8_lossy(&out.stdout).trim_end().to_string(), + stderr: String::from_utf8_lossy(&out.stderr).trim_end().to_string(), + }, + } + } + + fn nix_nix_store<'a>(args: Vec<String>) -> NixResult { + match run_nix_command("nix-store", args) { + err @ NixOutput::Err { .. } => panic!("nix-store --add failed: {:#?}", err), + NixOutput::Ok { stdout, .. 
} => NixResult::FileAddedToStore { + content: std::fs::read_to_string(&stdout) + .expect(&format!("cannot open {} as store file", stdout)), + }, + } + } + + fn tvix_nix_store<'a>(args: Vec<String>) -> NixResult { + eprintln!("running tvix with arguments {:?}", args); + let mut args = VecDeque::from(args); + args.push_front("tvix-store".to_string()); + super::main_args(Vec::from(args)) + .unwrap_or_else(|e| panic!("clap command line parsing failed:\n{}", e)) + } + + #[test] + #[cfg_attr(not(feature = "integration_tests"), ignore)] + fn test_nix_store_add() { + let file_content = "I am a copied file"; + let mut tempfile = tempfile::NamedTempFile::new().expect("cannot create temp file"); + tempfile + .write_all(file_content.as_bytes()) + .expect("could not write to tempfile"); + assert_eq!( + tvix_nix_store(vec![ + "--add".to_string(), + tempfile.path().as_os_str().to_string_lossy().into_owned() + ]), + nix_nix_store(vec![ + "--add".to_string(), + tempfile.path().as_os_str().to_string_lossy().into_owned() + ]), + "added file contents were not the same" + ); + + // make sure the tempfile lives till here + drop(tempfile) + } +} diff --git a/tvix/nix_cli/src/main.rs b/tvix/nix_cli/src/main.rs new file mode 100644 index 000000000000..40086e6f27ee --- /dev/null +++ b/tvix/nix_cli/src/main.rs @@ -0,0 +1,3 @@ +fn main() { + println!("Hello, tvix!"); +} diff --git a/tvix/proto/LICENSE b/tvix/proto/LICENSE new file mode 100644 index 000000000000..2034ada6fd9a --- /dev/null +++ b/tvix/proto/LICENSE @@ -0,0 +1,21 @@ +Copyright © The Tvix Authors + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +“Software”), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + diff --git a/tvix/proto/default.nix b/tvix/proto/default.nix new file mode 100644 index 000000000000..35e2eba7fed4 --- /dev/null +++ b/tvix/proto/default.nix @@ -0,0 +1,9 @@ +# Build protocol buffer definitions to ensure that protos are valid in +# CI. Note that the output of this build target is not actually used +# anywhere, it just functions as a CI check for now. +{ pkgs, ... 
}: + +pkgs.runCommand "tvix-cc-proto" { } '' + mkdir $out + ${pkgs.protobuf}/bin/protoc -I ${./.} evaluator.proto --cpp_out=$out +'' diff --git a/tvix/proto/evaluator.proto b/tvix/proto/evaluator.proto new file mode 100644 index 000000000000..96431a0b3bfb --- /dev/null +++ b/tvix/proto/evaluator.proto @@ -0,0 +1,144 @@ +// SPDX-License-Identifier: MIT +// Copyright © 2021 The Tvix Authors +syntax = "proto3"; + +package tvix.proto.v1; + +service EvaluatorService { + rpc Evaluate(stream EvaluateRequest) returns (stream EvaluateResponse) {} +} + +// +// Message types for EvaluateRequest +// + +message EvaluateFile { + // Absolute path at which the evaluator can find the file to be + // evaluated. + string file_path = 1; + + // Optional attribute that should be evaluated within the file, + // assuming that the value it evaluates to is an attribute set. + optional string attribute = 2; + + // Additional arguments to pass into the evaluation, with which the + // file's top-level function will be auto-called. + map<string, NixValue> arguments = 3; +} + +message EvaluateExpression { + // Literal Nix expression to evaluate. + string expression = 1; + + // Working directory in which the expression should be evaluated. + string working_directory = 2; +} + +message BuildResultChunk { + string drv_hash = 1; + string output = 2; + bytes data = 3; + + // This field may be set on the first build result chunk returned + // to the evaluator, indicating the total size of the output that + // is going to be streamed in bytes. + // + // If set, the evaluator can use this to appropriately allocate a + // buffer for the output. + optional int64 output_size = 4; +} + +// Indicates that a single build has completed successfully. In case +// that the build outputs were required by the evaluator this also +// indicates that the output has been returned completely. +message BuildSuccess { + string drv_hash = 1; + string output = 2; +} + +// Describes an error that occured during a single build. +// +// TODO: We might want a more sophisticated error type. +message BuildError { + string drv_hash = 1; + string output = 2; + string error = 3; +} + +message BuildResult { + oneof build_result { + BuildSuccess build_success = 1; + BuildError build_error = 2; + } +} + + +/// Messages sent to the evaluator by the build coordinator. +message EvaluateRequest { + oneof message { + // Ask the evaluator to evaluate the specified file, and + // optionally attribute within that file. Must be the first + // message. + EvaluateFile evaluate_file = 1; + + // Ask the evaluator to evaluate the specified Nix expression. + // Must be the first message. + EvaluateExpression evaluate_expression = 2; + + // Send the chunks of a build result, in response to a + // BuildRequest. + // + // Note: This message might change as the store protocol is + // designed, as it is possible that mechanisms for transferring + // files might be reused between the protocols. + BuildResultChunk build_result_chunk = 3; + + // Indicate the result of a single build. See the documentation + // for the message types defined above for semantic details. + BuildResult build_result = 4; + } +} + +// +// Message types for EvaluateResponse +// + +// TODO: Placeholder type. +message Derivation { + string drv = 1; +} + +// TODO: Placeholder type. +message NixValue { + string value = 1; +} + +// TODO: Placeholder type. 
+message NixError { + string value = 1; +} + +message BuildRequest { + Derivation drv = 1; + string output = 2; +} + +// Messages returned to the coordinator by the evaluator. +message EvaluateResponse { + oneof message { + // A derivation that was instantiated while reducing the graph, + // and whose output is not required by the evaluator. + Derivation derivation = 1; + + // A derivation that was instantiated while reducing the graph, + // and whose output is required by the evaluator (IFD). + BuildRequest build_request = 2; + + // The final value yielded by the evaluation. Stream is closed + // after this. + NixValue done = 3; + + // Evaluation error. Stream is closed after this. + NixError error = 4; + } +} diff --git a/tvix/serde/.skip-subtree b/tvix/serde/.skip-subtree new file mode 100644 index 000000000000..21b2d0d3580d --- /dev/null +++ b/tvix/serde/.skip-subtree @@ -0,0 +1 @@ +The foods.nix can not be read by readTree. diff --git a/tvix/serde/Cargo.toml b/tvix/serde/Cargo.toml new file mode 100644 index 000000000000..8ffc11a4eb6e --- /dev/null +++ b/tvix/serde/Cargo.toml @@ -0,0 +1,8 @@ +[package] +name = "tvix-serde" +version = "0.1.0" +edition = "2021" + +[dependencies] +tvix-eval = { path = "../eval" } +serde = { version = "1.0", features = ["derive"] } diff --git a/tvix/serde/default.nix b/tvix/serde/default.nix new file mode 100644 index 000000000000..5880bd24f663 --- /dev/null +++ b/tvix/serde/default.nix @@ -0,0 +1,5 @@ +{ depot, ... }: + +depot.tvix.crates.workspaceMembers.tvix-serde.build.override { + runTests = true; +} diff --git a/tvix/serde/examples/cfg-demo.rs b/tvix/serde/examples/cfg-demo.rs new file mode 100644 index 000000000000..5774a81f7752 --- /dev/null +++ b/tvix/serde/examples/cfg-demo.rs @@ -0,0 +1,35 @@ +//! This program demonstrates how to use tvix_serde to deserialise +//! program configuration (or other data) from Nix code. +//! +//! This makes it possible to use Nix as an embedded config language. +//! For greater control over evaluation, and for features like adding +//! additional builtins, depending directly on tvix_eval would be +//! required. +use serde::Deserialize; +use std::collections::HashMap; + +#[derive(Debug, Deserialize)] +enum Flavour { + Tasty, + Okay, + Eww, +} + +#[allow(dead_code)] +#[derive(Debug, Deserialize)] +struct Data { + name: String, + foods: HashMap<String, Flavour>, +} + +fn main() { + // Get the content from wherever, read it from a file, receive it + // over the network - whatever floats your boat! We'll include it + // as a string. + let code = include_str!("foods.nix"); + + // Now you can use tvix_serde to deserialise the struct: + let foods: Data = tvix_serde::from_str(code).expect("deserialisation should succeed"); + + println!("These are the foods:\n{:#?}", foods); +} diff --git a/tvix/serde/examples/foods.nix b/tvix/serde/examples/foods.nix new file mode 100644 index 000000000000..c8733cd3efec --- /dev/null +++ b/tvix/serde/examples/foods.nix @@ -0,0 +1,22 @@ +# This is content for the `Data` struct, written in intentionally +# convoluted Nix code. 
+let + mkFlavour = flavour: name: { + inherit name; + value = flavour; + }; + + tasty = mkFlavour "Tasty"; + okay = mkFlavour "Okay"; + eww = mkFlavour "Eww"; +in +{ + name = "exhaustive list of foods"; + + foods = builtins.listToAttrs [ + (tasty "beef") + (okay "tomatoes") + (eww "olives") + (tasty "coffee") + ]; +} diff --git a/tvix/serde/src/de.rs b/tvix/serde/src/de.rs new file mode 100644 index 000000000000..43efc71c6f67 --- /dev/null +++ b/tvix/serde/src/de.rs @@ -0,0 +1,447 @@ +//! Deserialisation from Nix to Rust values. + +use serde::de::value::{MapDeserializer, SeqDeserializer}; +use serde::de::{self, EnumAccess, VariantAccess}; +use tvix_eval::Value; + +use crate::error::Error; + +struct NixDeserializer { + value: tvix_eval::Value, +} + +impl NixDeserializer { + fn new(value: Value) -> Self { + if let Value::Thunk(thunk) = value { + Self::new(thunk.value().clone()) + } else { + Self { value } + } + } +} + +impl de::IntoDeserializer<'_, Error> for NixDeserializer { + type Deserializer = Self; + + fn into_deserializer(self) -> Self::Deserializer { + self + } +} + +pub fn from_str<'code, T>(src: &'code str) -> Result<T, Error> +where + T: serde::Deserialize<'code>, +{ + // First step is to evaluate the Nix code ... + let mut eval = tvix_eval::Evaluation::new(src, None); + eval.strict = true; + let source = eval.source_map(); + let result = eval.evaluate(); + + if !result.errors.is_empty() { + return Err(Error::NixErrors { + errors: result.errors, + source, + }); + } + + let de = NixDeserializer::new(result.value.expect("value should be present on success")); + + T::deserialize(de) +} + +fn unexpected(expected: &'static str, got: &Value) -> Error { + Error::UnexpectedType { + expected, + got: got.type_of(), + } +} + +fn visit_integer<I: TryFrom<i64>>(v: &Value) -> Result<I, Error> { + match v { + Value::Integer(i) => I::try_from(*i).map_err(|_| Error::IntegerConversion { + got: *i, + need: std::any::type_name::<I>(), + }), + + _ => Err(unexpected("integer", v)), + } +} + +impl<'de> de::Deserializer<'de> for NixDeserializer { + type Error = Error; + + fn deserialize_any<V>(self, visitor: V) -> Result<V::Value, Self::Error> + where + V: de::Visitor<'de>, + { + match self.value { + Value::Null => visitor.visit_unit(), + Value::Bool(b) => visitor.visit_bool(b), + Value::Integer(i) => visitor.visit_i64(i), + Value::Float(f) => visitor.visit_f64(f), + Value::String(s) => visitor.visit_string(s.to_string()), + Value::Path(p) => visitor.visit_string(p.to_string_lossy().into()), // TODO: hmm + Value::Attrs(_) => self.deserialize_map(visitor), + Value::List(_) => self.deserialize_seq(visitor), + + // tvix-eval types that can not be deserialized through serde. + Value::Closure(_) + | Value::Builtin(_) + | Value::Thunk(_) + | Value::AttrNotFound + | Value::Blueprint(_) + | Value::DeferredUpvalue(_) + | Value::UnresolvedPath(_) + | Value::Json(_) => Err(Error::Unserializable { + value_type: self.value.type_of(), + }), + } + } + + fn deserialize_bool<V>(self, visitor: V) -> Result<V::Value, Self::Error> + where + V: de::Visitor<'de>, + { + match self.value { + Value::Bool(b) => visitor.visit_bool(b), + _ => Err(unexpected("bool", &self.value)), + } + } + + fn deserialize_i8<V>(self, visitor: V) -> Result<V::Value, Self::Error> + where + V: de::Visitor<'de>, + { + visitor.visit_i8(visit_integer(&self.value)?) + } + + fn deserialize_i16<V>(self, visitor: V) -> Result<V::Value, Self::Error> + where + V: de::Visitor<'de>, + { + visitor.visit_i16(visit_integer(&self.value)?) 
+ } + + fn deserialize_i32<V>(self, visitor: V) -> Result<V::Value, Self::Error> + where + V: de::Visitor<'de>, + { + visitor.visit_i32(visit_integer(&self.value)?) + } + + fn deserialize_i64<V>(self, visitor: V) -> Result<V::Value, Self::Error> + where + V: de::Visitor<'de>, + { + visitor.visit_i64(visit_integer(&self.value)?) + } + + fn deserialize_u8<V>(self, visitor: V) -> Result<V::Value, Self::Error> + where + V: de::Visitor<'de>, + { + visitor.visit_u8(visit_integer(&self.value)?) + } + + fn deserialize_u16<V>(self, visitor: V) -> Result<V::Value, Self::Error> + where + V: de::Visitor<'de>, + { + visitor.visit_u16(visit_integer(&self.value)?) + } + + fn deserialize_u32<V>(self, visitor: V) -> Result<V::Value, Self::Error> + where + V: de::Visitor<'de>, + { + visitor.visit_u32(visit_integer(&self.value)?) + } + + fn deserialize_u64<V>(self, visitor: V) -> Result<V::Value, Self::Error> + where + V: de::Visitor<'de>, + { + visitor.visit_u64(visit_integer(&self.value)?) + } + + fn deserialize_f32<V>(self, visitor: V) -> Result<V::Value, Self::Error> + where + V: de::Visitor<'de>, + { + if let Value::Float(f) = self.value { + return visitor.visit_f32(f as f32); + } + + Err(unexpected("float", &self.value)) + } + + fn deserialize_f64<V>(self, visitor: V) -> Result<V::Value, Self::Error> + where + V: de::Visitor<'de>, + { + if let Value::Float(f) = self.value { + return visitor.visit_f64(f); + } + + Err(unexpected("float", &self.value)) + } + + fn deserialize_char<V>(self, visitor: V) -> Result<V::Value, Self::Error> + where + V: de::Visitor<'de>, + { + if let Value::String(s) = &self.value { + let chars = s.as_str().chars().collect::<Vec<_>>(); + if chars.len() == 1 { + return visitor.visit_char(chars[0]); + } + } + + Err(unexpected("char", &self.value)) + } + + fn deserialize_str<V>(self, visitor: V) -> Result<V::Value, Self::Error> + where + V: de::Visitor<'de>, + { + if let Value::String(s) = &self.value { + return visitor.visit_str(s.as_str()); + } + + Err(unexpected("string", &self.value)) + } + + fn deserialize_string<V>(self, visitor: V) -> Result<V::Value, Self::Error> + where + V: de::Visitor<'de>, + { + if let Value::String(s) = &self.value { + return visitor.visit_str(s.as_str()); + } + + Err(unexpected("string", &self.value)) + } + + fn deserialize_bytes<V>(self, _visitor: V) -> Result<V::Value, Self::Error> + where + V: de::Visitor<'de>, + { + unimplemented!() + } + + fn deserialize_byte_buf<V>(self, _visitor: V) -> Result<V::Value, Self::Error> + where + V: de::Visitor<'de>, + { + unimplemented!() + } + + // Note that this can not distinguish between a serialisation of + // `Some(())` and `None`. 
+ fn deserialize_option<V>(self, visitor: V) -> Result<V::Value, Self::Error> + where + V: de::Visitor<'de>, + { + if let Value::Null = self.value { + visitor.visit_none() + } else { + visitor.visit_some(self) + } + } + + fn deserialize_unit<V>(self, visitor: V) -> Result<V::Value, Self::Error> + where + V: de::Visitor<'de>, + { + if let Value::Null = self.value { + return visitor.visit_unit(); + } + + Err(unexpected("null", &self.value)) + } + + fn deserialize_unit_struct<V>( + self, + _name: &'static str, + visitor: V, + ) -> Result<V::Value, Self::Error> + where + V: de::Visitor<'de>, + { + self.deserialize_unit(visitor) + } + + fn deserialize_newtype_struct<V>( + self, + _name: &'static str, + visitor: V, + ) -> Result<V::Value, Self::Error> + where + V: de::Visitor<'de>, + { + visitor.visit_newtype_struct(self) + } + + fn deserialize_seq<V>(self, visitor: V) -> Result<V::Value, Self::Error> + where + V: de::Visitor<'de>, + { + if let Value::List(list) = self.value { + let mut seq = + SeqDeserializer::new(list.into_iter().map(|value| NixDeserializer::new(value))); + let result = visitor.visit_seq(&mut seq)?; + seq.end()?; + return Ok(result); + } + + Err(unexpected("list", &self.value)) + } + + fn deserialize_tuple<V>(self, _len: usize, visitor: V) -> Result<V::Value, Self::Error> + where + V: de::Visitor<'de>, + { + // just represent tuples as lists ... + self.deserialize_seq(visitor) + } + + fn deserialize_tuple_struct<V>( + self, + _name: &'static str, + _len: usize, + visitor: V, + ) -> Result<V::Value, Self::Error> + where + V: de::Visitor<'de>, + { + // same as above + self.deserialize_seq(visitor) + } + + fn deserialize_map<V>(self, visitor: V) -> Result<V::Value, Self::Error> + where + V: de::Visitor<'de>, + { + if let Value::Attrs(attrs) = self.value { + let mut map = MapDeserializer::new(attrs.into_iter().map(|(k, v)| { + ( + NixDeserializer::new(Value::String(k)), + NixDeserializer::new(v), + ) + })); + let result = visitor.visit_map(&mut map)?; + map.end()?; + return Ok(result); + } + + Err(unexpected("map", &self.value)) + } + + fn deserialize_struct<V>( + self, + _name: &'static str, + _fields: &'static [&'static str], + visitor: V, + ) -> Result<V::Value, Self::Error> + where + V: de::Visitor<'de>, + { + self.deserialize_map(visitor) + } + + // This method is responsible for deserializing the externally + // tagged enum variant serialisation. 
+ fn deserialize_enum<V>( + self, + name: &'static str, + _variants: &'static [&'static str], + visitor: V, + ) -> Result<V::Value, Self::Error> + where + V: de::Visitor<'de>, + { + match self.value { + // a string represents a unit variant + Value::String(s) => visitor.visit_enum(de::value::StrDeserializer::new(s.as_str())), + + // an attribute set however represents an externally + // tagged enum with content + Value::Attrs(attrs) => visitor.visit_enum(Enum(*attrs)), + + _ => Err(unexpected(name, &self.value)), + } + } + + fn deserialize_identifier<V>(self, visitor: V) -> Result<V::Value, Self::Error> + where + V: de::Visitor<'de>, + { + self.deserialize_str(visitor) + } + + fn deserialize_ignored_any<V>(self, visitor: V) -> Result<V::Value, Self::Error> + where + V: de::Visitor<'de>, + { + visitor.visit_unit() + } +} + +struct Enum(tvix_eval::NixAttrs); + +impl<'de> EnumAccess<'de> for Enum { + type Error = Error; + type Variant = NixDeserializer; + + // TODO: pass the known variants down here and check against them + fn variant_seed<V>(self, seed: V) -> Result<(V::Value, Self::Variant), Self::Error> + where + V: de::DeserializeSeed<'de>, + { + if self.0.len() != 1 { + return Err(Error::AmbiguousEnum); + } + + let (key, value) = self.0.into_iter().next().expect("length asserted above"); + let val = seed.deserialize(de::value::StrDeserializer::<Error>::new(key.as_str()))?; + + Ok((val, NixDeserializer::new(value))) + } +} + +impl<'de> VariantAccess<'de> for NixDeserializer { + type Error = Error; + + fn unit_variant(self) -> Result<(), Self::Error> { + // If this case is hit, a user specified the name of a unit + // enum variant but gave it content. Unit enum deserialisation + // is handled in `deserialize_enum` above. + Err(Error::UnitEnumContent) + } + + fn newtype_variant_seed<T>(self, seed: T) -> Result<T::Value, Self::Error> + where + T: de::DeserializeSeed<'de>, + { + seed.deserialize(self) + } + + fn tuple_variant<V>(self, _len: usize, visitor: V) -> Result<V::Value, Self::Error> + where + V: de::Visitor<'de>, + { + de::Deserializer::deserialize_seq(self, visitor) + } + + fn struct_variant<V>( + self, + _fields: &'static [&'static str], + visitor: V, + ) -> Result<V::Value, Self::Error> + where + V: de::Visitor<'de>, + { + de::Deserializer::deserialize_map(self, visitor) + } +} diff --git a/tvix/serde/src/de_tests.rs b/tvix/serde/src/de_tests.rs new file mode 100644 index 000000000000..8fe15a17e378 --- /dev/null +++ b/tvix/serde/src/de_tests.rs @@ -0,0 +1,200 @@ +use serde::Deserialize; +use std::collections::HashMap; + +use crate::de::from_str; + +#[test] +fn deserialize_none() { + let result: Option<usize> = from_str("null").expect("should deserialize"); + assert_eq!(None, result); +} + +#[test] +fn deserialize_some() { + let result: Option<usize> = from_str("40 + 2").expect("should deserialize"); + assert_eq!(Some(42), result); +} + +#[test] +fn deserialize_string() { + let result: String = from_str( + r#" + let greeter = name: "Hello ${name}!"; + in greeter "Slartibartfast" + "#, + ) + .expect("should deserialize"); + + assert_eq!(result, "Hello Slartibartfast!"); +} + +#[test] +fn deserialize_empty_list() { + let result: Vec<usize> = from_str("[ ]").expect("should deserialize"); + assert!(result.is_empty()) +} + +#[test] +fn deserialize_integer_list() { + let result: Vec<usize> = + from_str("builtins.map (n: n + 2) [ 21 40 67 ]").expect("should deserialize"); + assert_eq!(result, vec![23, 42, 69]); +} + +#[test] +fn deserialize_empty_map() { + let result: HashMap<String, 
usize> = from_str("{ }").expect("should deserialize"); + assert!(result.is_empty()); +} + +#[test] +fn deserialize_integer_map() { + let result: HashMap<String, usize> = from_str("{ age = 40 + 2; }").expect("should deserialize"); + assert_eq!(result.len(), 1); + assert_eq!(*result.get("age").unwrap(), 42); +} + +#[test] +fn deserialize_struct() { + #[derive(Debug, Deserialize, PartialEq)] + struct Person { + name: String, + age: usize, + } + + let result: Person = from_str( + r#" + { + name = "Slartibartfast"; + age = 42; + } + "#, + ) + .expect("should deserialize"); + + assert_eq!( + result, + Person { + name: "Slartibartfast".into(), + age: 42, + } + ); +} + +#[test] +fn deserialize_newtype() { + #[derive(Debug, Deserialize, PartialEq)] + struct Number(usize); + + let result: Number = from_str("42").expect("should deserialize"); + assert_eq!(result, Number(42)); +} + +#[test] +fn deserialize_tuple() { + let result: (String, usize) = from_str(r#" [ "foo" 42 ] "#).expect("should deserialize"); + assert_eq!(result, ("foo".into(), 42)); +} + +#[test] +fn deserialize_unit_enum() { + #[derive(Debug, Deserialize, PartialEq)] + enum Foo { + Bar, + Baz, + } + + let result: Foo = from_str("\"Baz\"").expect("should deserialize"); + assert_eq!(result, Foo::Baz); +} + +#[test] +fn deserialize_tuple_enum() { + #[derive(Debug, Deserialize, PartialEq)] + enum Foo { + Bar, + Baz(String, usize), + } + + let result: Foo = from_str( + r#" + { + Baz = [ "Slartibartfast" 42 ]; + } + "#, + ) + .expect("should deserialize"); + + assert_eq!(result, Foo::Baz("Slartibartfast".into(), 42)); +} + +#[test] +fn deserialize_struct_enum() { + #[derive(Debug, Deserialize, PartialEq)] + enum Foo { + Bar, + Baz { name: String, age: usize }, + } + + let result: Foo = from_str( + r#" + { + Baz.name = "Slartibartfast"; + Baz.age = 42; + } + "#, + ) + .expect("should deserialize"); + + assert_eq!( + result, + Foo::Baz { + name: "Slartibartfast".into(), + age: 42 + } + ); +} + +#[test] +fn deserialize_enum_all() { + #[derive(Debug, Deserialize, PartialEq)] + #[serde(rename_all = "snake_case")] + enum TestEnum { + UnitVariant, + TupleVariant(String, String), + StructVariant { name: String, age: usize }, + } + + let result: Vec<TestEnum> = from_str( + r#" + let + mkTuple = country: drink: { tuple_variant = [ country drink ]; }; + in + [ + (mkTuple "UK" "cask ale") + + "unit_variant" + + { + struct_variant.name = "Slartibartfast"; + struct_variant.age = 42; + } + + (mkTuple "Russia" "квас") + ] + "#, + ) + .expect("should deserialize"); + + let expected = vec![ + TestEnum::TupleVariant("UK".into(), "cask ale".into()), + TestEnum::UnitVariant, + TestEnum::StructVariant { + name: "Slartibartfast".into(), + age: 42, + }, + TestEnum::TupleVariant("Russia".into(), "квас".into()), + ]; + + assert_eq!(result, expected); +} diff --git a/tvix/serde/src/error.rs b/tvix/serde/src/error.rs new file mode 100644 index 000000000000..f206b830e95f --- /dev/null +++ b/tvix/serde/src/error.rs @@ -0,0 +1,102 @@ +//! When serialising Nix goes wrong ... + +use std::error; +use std::fmt::Display; + +#[derive(Clone, Debug)] +pub enum Error { + /// Attempted to deserialise an unsupported Nix value (such as a + /// function) that can not be represented by the + /// [`serde::Deserialize`] trait. + Unserializable { value_type: &'static str }, + + /// Expected to deserialize a value that is unsupported by Nix. + Unsupported { wanted: &'static str }, + + /// Expected a specific type, but got something else on the Nix side. 
+ UnexpectedType { + expected: &'static str, + got: &'static str, + }, + + /// Deserialisation error returned from `serde::de`. + Deserialization(String), + + /// Deserialized integer did not fit. + IntegerConversion { got: i64, need: &'static str }, + + /// Evaluation of the supplied Nix code failed while computing the + /// value for deserialisation. + NixErrors { + errors: Vec<tvix_eval::Error>, + source: tvix_eval::SourceCode, + }, + + /// Could not determine an externally tagged enum representation. + AmbiguousEnum, + + /// Attempted to provide content to a unit enum. + UnitEnumContent, +} + +impl Display for Error { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Error::Unserializable { value_type } => write!( + f, + "can not deserialise a Nix '{}' into a Rust type", + value_type + ), + + Error::Unsupported { wanted } => { + write!(f, "can not deserialize a '{}' from a Nix value", wanted) + } + + Error::UnexpectedType { expected, got } => { + write!(f, "expected type {}, but got Nix type {}", expected, got) + } + + Error::NixErrors { errors, source } => { + writeln!( + f, + "{} occured during Nix evaluation: ", + if errors.len() == 1 { "error" } else { "errors" } + )?; + + for err in errors { + writeln!(f, "{}", err.fancy_format_str(&source))?; + } + + Ok(()) + } + + Error::Deserialization(err) => write!(f, "deserialisation error occured: {}", err), + + Error::IntegerConversion { got, need } => { + write!(f, "i64({}) does not fit in a {}", got, need) + } + + Error::AmbiguousEnum => write!(f, "could not determine enum variant: ambiguous keys"), + + Error::UnitEnumContent => write!(f, "provided content for unit enum variant"), + } + } +} + +impl error::Error for Error { + fn source(&self) -> Option<&(dyn error::Error + 'static)> { + match self { + Self::NixErrors { errors, .. } => errors.first().map(|e| e as &dyn error::Error), + _ => None, + } + } +} + +impl serde::de::Error for Error { + fn custom<T>(err: T) -> Self + where + T: Display, + { + Self::Deserialization(err.to_string()) + } +} diff --git a/tvix/serde/src/lib.rs b/tvix/serde/src/lib.rs new file mode 100644 index 000000000000..41ad23b18587 --- /dev/null +++ b/tvix/serde/src/lib.rs @@ -0,0 +1,11 @@ +//! `tvix-serde` implements (de-)serialisation of Rust data structures +//! to/from Nix. This is intended to make it easy to use Nix as as +//! configuration language. 
+ +mod de; +mod error; + +pub use de::from_str; + +#[cfg(test)] +mod de_tests; diff --git a/tvix/store/Cargo.toml b/tvix/store/Cargo.toml new file mode 100644 index 000000000000..a88bdefd7aac --- /dev/null +++ b/tvix/store/Cargo.toml @@ -0,0 +1,48 @@ +[package] +name = "tvix-store" +version = "0.1.0" +edition = "2021" + +[dependencies] +anyhow = "1.0.68" +blake3 = { version = "1.3.1", features = ["rayon", "std"] } +clap = { version = "4.0", features = ["derive", "env"] } +count-write = "0.1.0" +data-encoding = "2.3.3" +lazy_static = "1.4.0" +nix-compat = { path = "../nix-compat" } +prost = "0.11.2" +rayon = "1.6.1" +sha2 = "0.10.6" +sled = { version = "0.34.7", features = ["compression"] } +tvix-eval = { path = "../eval" } +thiserror = "1.0.38" +tokio-stream = "0.1.14" +tokio = { version = "1.28.0", features = ["rt-multi-thread", "net"] } +tonic = "0.8.2" +tracing = "0.1.37" +tracing-subscriber = { version = "0.3.16", features = ["json"] } +walkdir = "2.3.2" +tokio-util = { version = "0.7.8", features = ["io", "io-util"] } +tower = "0.4.13" +futures = "0.3.28" +bytes = "1.4.0" +smol_str = "0.2.0" +serde_json = "1.0" + +[dependencies.tonic-reflection] +optional = true +version = "0.5.0" + +[build-dependencies] +prost-build = "0.11.2" +tonic-build = "0.8.2" + +[dev-dependencies] +test-case = "2.2.2" +tempfile = "3.3.0" +tonic-mock = { git = "https://github.com/brainrake/tonic-mock", branch = "bump-dependencies" } + +[features] +default = ["reflection"] +reflection = ["tonic-reflection"] diff --git a/tvix/store/README.md b/tvix/store/README.md new file mode 100644 index 000000000000..7844264ca181 --- /dev/null +++ b/tvix/store/README.md @@ -0,0 +1,59 @@ +# //tvix/store + +This contains the code hosting the tvix-store. + +For the local store, Nix realizes files on the filesystem in `/nix/store` (and +maintains some metadata in a SQLite database). For "remote stores", it +communicates this metadata in NAR (Nix ARchive) and NARInfo format. + +Compared to the Nix model, `tvix-store` stores data on a much more granular +level than that, which provides more deduplication possibilities, and more +granular copying. + +However, enough information is preserved to still be able to render NAR and +NARInfo (handled by `//tvix/nar-bridge`). + +## More Information +Check the `protos/` subfolder for the definition of the exact RPC methods and +messages. + + +## Interacting with the GRPC service manually +The shell environment in `//tvix` provides `evans`, which is an interactive +REPL-based gPRC client. + +You can use it to connect to a `tvix-store` and call the various RPC methods. + +```shell +$ cargo run & +$ evans --host localhost --port 8000 -r repl + ______ + | ____| + | |__ __ __ __ _ _ __ ___ + | __| \ \ / / / _. | | '_ \ / __| + | |____ \ V / | (_| | | | | | \__ \ + |______| \_/ \__,_| |_| |_| |___/ + + more expressive universal gRPC client + + +tvix.store.v1@localhost:8000> service BlobService + +tvix.store.v1.BlobService@localhost:8000> call Put --bytes-from-file +data (TYPE_BYTES) => /run/current-system/system +{ + "digest": "KOM3/IHEx7YfInAnlJpAElYezq0Sxn9fRz7xuClwNfA=" +} + +tvix.store.v1.BlobService@localhost:8000> call Get --bytes-as-base64 +digest (TYPE_BYTES) => KOM3/IHEx7YfInAnlJpAElYezq0Sxn9fRz7xuClwNfA= +{ + "data": "eDg2XzY0LWxpbnV4" +} + +$ echo eDg2XzY0LWxpbnV4 | base64 -d +x86_64-linux +``` + +Thanks to `tvix-store` providing gRPC Server Reflection (with `reflection` +feature), you don't need to point `evans` to the `.proto` files. 
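As a quick sanity check of the digests in the transcript above, here is a hedged standalone sketch (not part of the crate). It assumes, as the store docs describe, that a `BlobService` digest is simply the blake3 hash of the literal blob contents, and it uses the `blake3` and `data-encoding` crates already listed in the Cargo.toml above. Whether the printed value matches the transcript depends on the actual bytes of the uploaded file; the `Get` response shown decodes to `x86_64-linux`.

```rust
// Standalone sketch: recompute a BlobService digest locally.
// Assumption: the digest of a blob is the blake3 hash of its raw contents,
// printed base64-encoded the way evans displays TYPE_BYTES fields.
fn main() {
    // "eDg2XzY0LWxpbnV4" from the Get call above decodes to "x86_64-linux".
    let contents = b"x86_64-linux";
    let digest = blake3::hash(contents);
    println!("{}", data_encoding::BASE64.encode(digest.as_bytes()));
}
```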
diff --git a/tvix/store/build.rs b/tvix/store/build.rs new file mode 100644 index 000000000000..e75f021a2bcf --- /dev/null +++ b/tvix/store/build.rs @@ -0,0 +1,32 @@ +use std::io::Result; + +fn main() -> Result<()> { + #[allow(unused_mut)] + let mut builder = tonic_build::configure(); + + #[cfg(feature = "reflection")] + { + let out_dir = std::path::PathBuf::from(std::env::var("OUT_DIR").unwrap()); + let descriptor_path = out_dir.join("tvix.store.v1.bin"); + + builder = builder.file_descriptor_set_path(descriptor_path); + }; + + builder.build_server(true).build_client(true).compile( + &[ + "tvix/store/protos/castore.proto", + "tvix/store/protos/pathinfo.proto", + "tvix/store/protos/rpc_blobstore.proto", + "tvix/store/protos/rpc_directory.proto", + "tvix/store/protos/rpc_pathinfo.proto", + ], + // If we are in running `cargo build` manually, using `../..` works fine, + // but in case we run inside a nix build, we need to instead point PROTO_ROOT + // to a sparseTree containing that structure. + &[match std::env::var_os("PROTO_ROOT") { + Some(proto_root) => proto_root.to_str().unwrap().to_owned(), + None => "../..".to_string(), + }], + )?; + Ok(()) +} diff --git a/tvix/store/default.nix b/tvix/store/default.nix new file mode 100644 index 000000000000..ec04629ab24b --- /dev/null +++ b/tvix/store/default.nix @@ -0,0 +1,5 @@ +{ depot, ... }: + +depot.tvix.crates.workspaceMembers.tvix-store.build.override { + runTests = true; +} diff --git a/tvix/store/docs/api.md b/tvix/store/docs/api.md new file mode 100644 index 000000000000..549ea703aec8 --- /dev/null +++ b/tvix/store/docs/api.md @@ -0,0 +1,280 @@ +tvix-store API +============== + +This document outlines the design of the API exposed by tvix-store, as +well as other implementations of this store protocol. + +The store API has four main consumers: + +1. The evaluator (or more correctly, the CLI/coordinator, in the Tvix + case) communicates with the store to: + + * Upload files and directories (e.g. from `builtins.path`, or `src = ./path` + Nix expressions). + * Read files from the store where necessary (e.g. when `nixpkgs` is + located in the store, or for IFD). + +2. The builder communicates with the store to: + + * Upload files and directories after a build, to persist build artifacts in + the store. + +3. Tvix clients (such as users that have Tvix installed, or, depending + on perspective, builder environments) expect the store to + "materialise" on disk to provide a directory layout with store + paths. + +4. Stores may communicate with other stores, to substitute already built store + paths, i.e. a store acts as a binary cache for other stores. + +The store API attempts to reuse parts of its API between these three +consumers by making similarities explicit in the protocol. This leads +to a protocol that is slightly more complex than a simple "file +upload/download"-system, but at significantly greater efficiency, both in terms +of deduplication opportunities as well as granularity. + +## The Store model + +Contents inside a tvix-store can be grouped into three different message types: + + * Blobs + * Directories + * PathInfo (see further down) + +(check `castore.md` for more detailled field descriptions) + +### Blobs +A blob object contains the literal file contents of regular (or executable) +files. + +### Directory +A directory object describes the direct children of a directory. + +It contains: + - name of child regular (or executable) files, and their [blake3][blake3] hash. 
+ - name of child symlinks, and their target (as string) + - name of child directories, and their [blake3][blake3] hash (forming a Merkle DAG) + +### Content-addressed Store Model +For example, lets consider a directory layout like this, with some +imaginary hashes of file contents: + +``` +. +├── file-1.txt hash: 5891b5b522d5df086d0ff0b110fb +└── nested + └── file-2.txt hash: abc6fd595fc079d3114d4b71a4d8 +``` + +A hash for the *directory* `nested` can be created by creating the `Directory` +object: + +```json +{ + "directories": [], + "files": [{ + "name": "file-2.txt", + "digest": "abc6fd595fc079d3114d4b71a4d8", + "size": 123, + }], + "symlink": [], +} +``` + +And then hashing a serialised form of that data structure. We use the blake3 +hash of the canonical protobuf representation. Let's assume the hash was +`ff0029485729bcde993720749232`. + +To create the directory object one layer up, we now refer to our `nested` +directory object in `directories`, and to `file-1.txt` in `files`: + +```json +{ + "directories": [{ + "name": "nested", + "digest": "ff0029485729bcde993720749232", + "size": 1, + }], + "files": [{ + "name": "file-1.txt", + "digest": "5891b5b522d5df086d0ff0b110fb", + "size": 124, + }] +} +``` + +This Merkle DAG of Directory objects, and flat store of blobs can be used to +describe any file/directory/symlink inside a store path. Due to its content- +addressed nature, it'll automatically deduplicate (re-)used (sub)directories, +and allow substitution from any (untrusted) source. + +The thing that's now only missing is the metadata to map/"mounting" from the +content-addressed world to a physical path. + +### PathInfo +As most paths in the Nix store currently are input-addressed [^input-addressed], +we need something mapping from an input-addressed "output path hash" to the +contents in the content- addressed world. + +That's what `PathInfo` provides. It embeds the root node (Directory, File or +Symlink) at a given store path. + +The root nodes' `name` field is populated with the (base)name inside +`/nix/store`, so `xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx-pname-1.2.3`. + +The `PathInfo` message also stores references to other store paths, and some +more NARInfo-specific metadata (signatures, narhash, narsize). + + +## API overview + +There's three different services: + +### BlobService +`BlobService` can be used to store and retrieve blobs of data, used to host +regular file contents. + +It is content-addressed, using [blake3][blake3] +as a hashing function. + +As blake3 is a tree hash, there's an opportunity to do +[verified streaming][bao] of parts of the file, +which doesn't need to trust any more information than the root hash itself. +Future extensions of the `BlobService` protocol will enable this. + +### DirectoryService +`DirectoryService` allows lookups (and uploads) of `Directory` messages, and +whole reference graphs of them. + + +### PathInfoService +The PathInfo service provides lookups from an output path hash to a `PathInfo` +message. + +## Example flows + +Below there are some common usecases of tvix-store, and how the different +services are used. + +### Upload files and directories +This needed for `builtins.path` or `src = ./path` in Nix expressions (A), as +well as for uploading build artifacts to a store (B). + +The path specified needs to be (recursively, BFS-style) traversed. + * All file contents need to be hashed with blake3, and submitted to the + *BlobService* if not already present. 
+ A reference to them needs to be added to the parent Directory object that's + constructed. + * All symlinks need to be added to the parent directory they reside in. + * Whenever a Directory has been fully traversed, it needs to be uploaded to + the *DirectoryService* and a reference to it needs to be added to the parent + Directory object. + +Most of the hashing / directory traversal/uploading can happen in parallel, +as long as Directory objects only refer to Directory objects and Blobs that +have already been uploaded. + +When reaching the root, a `PathInfo` object needs to be constructed. + + * In the case of content-addressed paths (A), the name of the root node is + based on the NAR representation of the contents. + It might make sense to be able to offload the NAR calculation to the store, + which can cache it. + * In the case of build artifacts (B), the output path is input-addressed and + known upfront. + +Contrary to Nix, this has the advantage of not having to upload a lot of things +to the store that didn't change. + +### Reading files from the store from the evaluator +This is the case when `nixpkgs` is located in the store, or IFD in general. + +The store client asks the `PathInfoService` for the `PathInfo` of the output +path in the request, and looks at the root node. + +If something other than the root path is requested, the root_node Directory is +inspected and potentially a chain of `Directory` objects requested from +*DirectoryService*. [^n+1query] + +When the desired file is reached, the *BlobService* can be used to read the +contents of this file, and return it back to the evaluator. + +FUTUREWORK: define how importing from symlinks should/does work. + +Contrary to Nix, this has the advantage of not having to copy all of the +contents of a store path to the evaluating machine, but really only fetching +the files the evaluator currently cares about. + +### Materializing store paths on disk +This is useful for people running a Tvix-only system, or running builds on a +"Tvix remote builder" in its own mount namespace. + +In a system with Nix installed, we can't simply manually "extract" things to +`/nix/store`, as Nix assumes to own all writes to this location. +In these usecases, we're probably better off exposing a tvix-store as a local +binary cache (that's what nar-bridge does). + +Assuming we are in an environment where we control `/nix/store` exclusively, a +"realize to disk" would either "extract" things from the tvix-store to a +filesystem, or expose a FUSE filesystem. The latter would be particularly +interesting for remote build workloads, as build inputs can be realized on- +demand, which saves copying around a lot of never-accessed files. + +In both cases, the API interactions are similar. + * The *PathInfoService* is asked for the `PathInfo` of the requested store path. + * If everything should be "extracted", the *DirectoryService* is asked for all + `Directory` objects in the closure, the file structure is created, all Blobs + are downloaded and placed in their corresponding location and all symlinks + are created accordingly. + * If this is a FUSE filesystem, we can decide to only request a subset, + similar to the "Reading files from the store from the evaluator" usecase, + even though it might make sense to keep all Directory objects around. + (See the caveat in "Trust model" though!) 
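To make the "extract everything" case above concrete, here is a minimal sketch of the extraction loop. It is hedged: the `Directory` struct and the two lookup closures are hand-written stand-ins for the protobuf `Directory` message and for *DirectoryService*/*BlobService* calls (mirroring the fields documented in `castore.md`), not the actual generated types, and setting the executable bit is omitted.

```rust
use std::fs;
use std::io::Write;
use std::os::unix::fs::symlink;
use std::path::Path;

/// Hand-rolled stand-in for the castore `Directory` message (see castore.md);
/// digests are blake3 hashes referring to blobs or other Directory messages.
struct Directory {
    directories: Vec<(String, [u8; 32])>, // (name, digest of the child Directory)
    files: Vec<(String, [u8; 32], bool)>, // (name, blob digest, executable)
    symlinks: Vec<(String, String)>,      // (name, target)
}

/// Recursively materialise a Directory closure below `root`.
/// `get_directory` and `get_blob` stand in for DirectoryService/BlobService lookups.
fn extract(
    root: &Path,
    dir: &Directory,
    get_directory: &impl Fn(&[u8; 32]) -> Directory,
    get_blob: &impl Fn(&[u8; 32]) -> Vec<u8>,
) -> std::io::Result<()> {
    fs::create_dir_all(root)?;

    for (name, digest, _executable) in &dir.files {
        // (setting the executable bit is omitted in this sketch)
        let mut f = fs::File::create(root.join(name))?;
        f.write_all(&get_blob(digest))?;
    }

    for (name, target) in &dir.symlinks {
        symlink(target, root.join(name))?;
    }

    for (name, digest) in &dir.directories {
        extract(&root.join(name), &get_directory(digest), get_directory, get_blob)?;
    }

    Ok(())
}

fn main() -> std::io::Result<()> {
    // Dummy in-memory "services": every digest resolves to an empty directory / empty blob.
    let empty = |_d: &[u8; 32]| Directory { directories: vec![], files: vec![], symlinks: vec![] };
    let blob = |_d: &[u8; 32]| Vec::new();
    extract(Path::new("/tmp/example-store-path"), &empty(&[0; 32]), &empty, &blob)
}
```

In a real implementation the two closures would be backed by the gRPC services, and the FUSE variant would call them lazily per accessed path instead of walking the whole closure up front.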
+
+### Reading files from the store from the evaluator
+This is the case when `nixpkgs` is located in the store, or IFD in general.
+
+The store client asks the `PathInfoService` for the `PathInfo` of the output
+path in the request, and looks at the root node.
+
+If something other than the root path is requested, the root node's `Directory`
+is inspected and potentially a chain of `Directory` objects is requested from
+*DirectoryService*. [^n+1query]
+
+When the desired file is reached, the *BlobService* can be used to read the
+contents of this file and return them to the evaluator.
+
+FUTUREWORK: define how importing from symlinks should/does work.
+
+Contrary to Nix, this has the advantage of not having to copy all of the
+contents of a store path to the evaluating machine, and only fetching the
+files the evaluator currently cares about.
+
+### Materializing store paths on disk
+This is useful for people running a Tvix-only system, or running builds on a
+"Tvix remote builder" in its own mount namespace.
+
+In a system with Nix installed, we can't simply manually "extract" things to
+`/nix/store`, as Nix assumes it owns all writes to this location.
+In these use cases, we're probably better off exposing a tvix-store as a local
+binary cache (that's what nar-bridge does).
+
+Assuming we are in an environment where we control `/nix/store` exclusively, a
+"realize to disk" would either "extract" things from the tvix-store to a
+filesystem, or expose a FUSE filesystem. The latter would be particularly
+interesting for remote build workloads, as build inputs can be realized
+on-demand, which saves copying around a lot of never-accessed files.
+
+In both cases, the API interactions are similar.
+ * The *PathInfoService* is asked for the `PathInfo` of the requested store path.
+ * If everything should be "extracted", the *DirectoryService* is asked for all
+   `Directory` objects in the closure, the file structure is created, all Blobs
+   are downloaded and placed in their corresponding location and all symlinks
+   are created accordingly.
+ * If this is a FUSE filesystem, we can decide to only request a subset,
+   similar to the "Reading files from the store from the evaluator" use case,
+   even though it might make sense to keep all Directory objects around.
+   (See the caveat in "Trust model" though!)
+
+### Stores communicating with other stores
+The gRPC API exposed by the tvix-store allows composing multiple stores, and
+implementing caching strategies that store clients don't need to be aware of.
+
+ * For example, a caching strategy could have a fast local tvix-store that's
+   asked first and is filled with data from a slower remote tvix-store.
+
+ * Multiple stores could be asked for the same data, and whatever store returns
+   the right data first wins.
+
+
+## Trust model / Distribution
+As already described above, the only non-content-addressed service is the
+`PathInfo` service.
+
+This means all other messages (such as `Blob` and `Directory` messages) can be
+substituted from many different, untrusted sources/mirrors, which makes
+plugging in additional substitution strategies (like IPFS or local network
+neighbors) very simple.
+
+As for `PathInfo`, we don't specify an additional signature mechanism yet, but
+carry the NAR-based signatures from Nix along.
+
+This means that if we don't trust a remote `PathInfo` object, we currently need
+to "stream" the NAR representation to validate these signatures.
+
+However, the slow part is downloading NAR files; with the additional
+granularity available, we might only need to download a few small blobs,
+rather than a whole NAR file.
+
+A future signature mechanism that only signs (parts of) the `PathInfo` message,
+which itself only points to content-addressed data, will enable verified
+partial access into a store path, opening up opportunities for lazy filesystem
+access, which is very useful in remote builder scenarios.
+
+
+
+[blake3]: https://github.com/BLAKE3-team/BLAKE3
+[bao]: https://github.com/oconnor663/bao
+[^input-addressed]: Nix hashes the A-Term representation of a .drv, after doing
+                    some replacements on referenced Input Derivations to
+                    calculate output paths.
+[^n+1query]: This would expose an N+1 query problem. However, it's not a problem
+             in practice, as there's usually a "local" caching store in the
+             loop, and *DirectoryService* supports a recursive lookup for all
+             `Directory` children of a `Directory`. \ No newline at end of file diff --git a/tvix/store/docs/castore.md new file mode 100644 index 000000000000..f555ba5a861b --- /dev/null +++ b/tvix/store/docs/castore.md @@ -0,0 +1,50 @@ +# //tvix/store/docs/castore.md
+
+This provides some more notes on the fields used in castore.proto.
+
+It's meant to supplement `//tvix/store/docs/api.md`.
+
+## Directory message
+`Directory` messages use the blake3 hash of their canonical protobuf
+serialization as their identifier.
+
+A `Directory` message contains three lists, `directories`, `files` and
+`symlinks`, holding `DirectoryNode`, `FileNode` and `SymlinkNode` messages
+respectively. They describe all the direct child elements that are contained in
+a directory.
+
+All three message types have a `name` field, specifying the (base)name of the
+element (which MUST not contain slashes or null bytes, and MUST not be '.' or '..').
+For reproducibility reasons, the lists MUST be sorted by that name, and the
+names MUST be unique across all three lists.
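+
+For illustration, the Go bindings that live next to these protos implement
+these checks as `Validate()` on the `Directory` message; a quick sketch, with
+a placeholder digest:
+
+```go
+package main
+
+import (
+	"fmt"
+
+	storev1pb "code.tvl.fyi/tvix/store/protos"
+)
+
+func main() {
+	d := &storev1pb.Directory{
+		Files: []*storev1pb.FileNode{{
+			Name:   "foo/bar", // invalid: names MUST not contain slashes
+			Digest: make([]byte, 32),
+			Size:   42,
+		}},
+	}
+
+	// Validate checks name validity, digest lengths, sorting and uniqueness.
+	if err := d.Validate(); err != nil {
+		fmt.Println(err) // invalid name for FileNode: foo/bar
+	}
+}
+```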
+
+In addition to the `name` field, the various *Node messages have the following
+fields:
+
+## DirectoryNode
+A `DirectoryNode` message represents a child directory.
+
+It has a `digest` field, which points to the identifier of another `Directory`
+message, making a `Directory` a Merkle tree (or strictly speaking, a graph, as
+two elements pointing to a child directory with the same contents would point
+to the same `Directory` message).
+
+There's also a `size` field, containing the (total) number of all child
+elements in the referenced `Directory`, which helps with inode calculation.
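+
+As a small worked example (a sketch using the `Size()` helper from the Go
+bindings, with placeholder digests), the `size` of a `DirectoryNode` composes
+like this:
+
+```go
+package main
+
+import (
+	"fmt"
+
+	storev1pb "code.tvl.fyi/tvix/store/protos"
+)
+
+func main() {
+	dummyDigest := make([]byte, 32) // stands in for a real blake3 digest
+
+	// `sub` contains two files and one symlink, so sub.Size() is 3.
+	sub := &storev1pb.Directory{
+		Files: []*storev1pb.FileNode{
+			{Name: "a", Digest: dummyDigest, Size: 1},
+			{Name: "b", Digest: dummyDigest, Size: 2},
+		},
+		Symlinks: []*storev1pb.SymlinkNode{{Name: "c", Target: "a"}},
+	}
+	subDigest, _ := sub.Digest() // error handling omitted
+
+	// A directory containing `sub` plus one file has
+	// 1 (the file) + 1 (`sub` itself) + 3 (everything inside `sub`) = 5 elements.
+	root := &storev1pb.Directory{
+		Directories: []*storev1pb.DirectoryNode{{Name: "sub", Digest: subDigest, Size: sub.Size()}},
+		Files:       []*storev1pb.FileNode{{Name: "readme", Digest: dummyDigest, Size: 10}},
+	}
+	fmt.Println(root.Size()) // 5
+}
+```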
+
+## FileNode
+A `FileNode` message represents a child (regular) file.
+
+Its `digest` field contains the blake3 hash of the file contents. It can be
+looked up in the `BlobService`.
+
+The `size` field contains the size of the blob the `digest` field refers to.
+
+The `executable` field specifies whether the file should be marked as
+executable or not.
+
+## SymlinkNode
+A `SymlinkNode` message represents a child symlink.
+
+Besides the `name` field, the only other field is `target`, a string
+containing the target of the symlink. diff --git a/tvix/store/docs/why-not-git-trees.md new file mode 100644 index 000000000000..fd46252cf55c --- /dev/null +++ b/tvix/store/docs/why-not-git-trees.md @@ -0,0 +1,57 @@ +## Why not git tree objects?
+
+We've been experimenting with (some variations of) the git tree and object
+format, and ultimately decided against using it as an internal format, and
+instead adapted the one documented in the other documents here.
+
+While the tvix-store API protocol shares some similarities with the format used
+in git for trees and objects, the git one has shown some significant
+disadvantages:
+
+### The binary encoding itself
+
+#### trees
+The git tree object format is a very low-level, error-prone and
+"made-to-be-read-and-written-from-C" binary format.
+
+Tree objects are a combination of null-terminated strings and fields of known
+length. References to other tree objects use the literal sha1 hash of another
+tree object in this encoding.
+Extending or changing the format is very hard to do right, because parsers are
+not aware they might be parsing something different.
+
+The tvix-store protocol uses a canonical protobuf serialization, and uses
+the [blake3][blake3] hash of that serialization to point to other `Directory`
+messages.
+It's compact, and encoders and decoders are available for many programming
+languages.
+The choice of protobuf makes it easy to add new fields, and lets old clients
+detect unknown fields [^adding-fields].
+
+#### blob
+On disk, git blob objects start with a "blob" prefix, then the size of the
+payload, and then the data itself. The hash of a blob is the literal sha1sum
+over all of this, which makes it something very git-specific to request.
+
+tvix-store simply uses the [blake3][blake3] hash of the literal contents
+when referring to a file/blob, which makes it very easy to ask other data
+sources for the same data, as no git-specific payload is included in the hash.
+This also plays very well together with things like [iroh][iroh-discussion],
+which plans to provide a way to substitute (large) blobs by their blake3 hash
+over the IPFS network.
+
+In addition to that, [blake3][blake3] makes it possible to do
+[verified streaming][bao], as already described in other parts of the
+documentation.
+
+The git tree object format uses sha1 both for references to other trees and
+hashes of blobs, which isn't really a hash function to fundamentally base
+everything on in 2023.
+The [migration to sha256][git-sha256] has also been dead for some years now,
+and it's unclear what a "blake3" version of this would even look like.
+
+[bao]: https://github.com/oconnor663/bao
+[blake3]: https://github.com/BLAKE3-team/BLAKE3
+[git-sha256]: https://git-scm.com/docs/hash-function-transition/
+[iroh-discussion]: https://github.com/n0-computer/iroh/discussions/707#discussioncomment-5070197
+[^adding-fields]: Obviously, adding new fields will change hashes, but it's something that's easy to detect. \ No newline at end of file diff --git a/tvix/store/protos/LICENSE new file mode 100644 index 000000000000..2034ada6fd9a --- /dev/null +++ b/tvix/store/protos/LICENSE @@ -0,0 +1,21 @@ +Copyright © The Tvix Authors
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+“Software”), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+ diff --git a/tvix/store/protos/castore.go new file mode 100644 index 000000000000..634aa7924f64 --- /dev/null +++ b/tvix/store/protos/castore.go @@ -0,0 +1,163 @@ +package storev1
+
+import (
+	"fmt"
+	"strings"
+
+	"google.golang.org/protobuf/proto"
+	"lukechampine.com/blake3"
+)
+
+// The size of a directory is calculated by summing up the numbers of
+// `directories`, `files` and `symlinks`, and for each directory, its size
+// field.
+func (d *Directory) Size() uint32 {
+	var size uint32
+	size = uint32(len(d.Files) + len(d.Symlinks))
+	for _, d := range d.Directories {
+		size += 1 + d.Size
+	}
+	return size
+}
+
+// Digest returns the blake3 hash of the Directory's canonical (deterministic)
+// protobuf serialization.
+func (d *Directory) Digest() ([]byte, error) {
+	b, err := proto.MarshalOptions{
+		Deterministic: true,
+	}.Marshal(d)
+
+	if err != nil {
+		return nil, fmt.Errorf("error while marshalling directory: %w", err)
+	}
+
+	h := blake3.New(32, nil)
+
+	_, err = h.Write(b)
+	if err != nil {
+		return nil, fmt.Errorf("error writing to hasher: %w", err)
+	}
+
+	return h.Sum(nil), nil
+}
+
+// isValidName checks a name for validity.
+// We disallow slashes, null bytes, '.', '..' and the empty string.
+// Depending on the context, a *Node message with an empty string as name is
+// allowed, but they don't occur inside a Directory message.
+func isValidName(n string) bool {
+	if n == "" || n == ".." || n == "." 
|| strings.Contains(n, "\x00") || strings.Contains(n, "/") { + return false + } + return true +} + +// Validate thecks the Directory message for invalid data, such as: +// - violations of name restrictions +// - invalid digest lengths +// - not properly sorted lists +// - duplicate names in the three lists +func (d *Directory) Validate() error { + // seenNames contains all seen names so far. + // We populate this to ensure node names are unique across all three lists. + seenNames := make(map[string]interface{}) + + // We also track the last seen name in each of the three lists, + // to ensure nodes are sorted by their names. + var lastDirectoryName, lastFileName, lastSymlinkName string + + // helper function to only insert in sorted order. + // used with the three lists above. + // Note this consumes a *pointer to* a string, as it mutates it. + insertIfGt := func(lastName *string, name string) error { + // update if it's greater than the previous name + if name > *lastName { + *lastName = name + return nil + } else { + return fmt.Errorf("%v is not in sorted order", name) + } + } + + // insertOnce inserts into seenNames if the key doesn't exist yet. + insertOnce := func(name string) error { + if _, found := seenNames[name]; found { + return fmt.Errorf("duplicate name: %v", name) + } + seenNames[name] = nil + return nil + } + + // Loop over all Directories, Files and Symlinks individually. + // Check the name for validity, check a potential digest for length, + // then check for sorting in the current list, and uniqueness across all three lists. + for _, directoryNode := range d.Directories { + directoryName := directoryNode.GetName() + + // check name for validity + if !isValidName(directoryName) { + return fmt.Errorf("invalid name for DirectoryNode: %v", directoryName) + } + + // check digest to be 32 bytes + digestLen := len(directoryNode.GetDigest()) + if digestLen != 32 { + return fmt.Errorf("invalid digest length for DirectoryNode: %d", digestLen) + } + + // ensure names are sorted + if err := insertIfGt(&lastDirectoryName, directoryName); err != nil { + return err + } + + // add to seenNames + if err := insertOnce(directoryName); err != nil { + return err + } + + } + + for _, fileNode := range d.Files { + fileName := fileNode.GetName() + + // check name for validity + if !isValidName(fileName) { + return fmt.Errorf("invalid name for FileNode: %v", fileName) + } + + // check digest to be 32 bytes + digestLen := len(fileNode.GetDigest()) + if digestLen != 32 { + return fmt.Errorf("invalid digest length for FileNode: %d", digestLen) + } + + // ensure names are sorted + if err := insertIfGt(&lastFileName, fileName); err != nil { + return err + } + + // add to seenNames + if err := insertOnce(fileName); err != nil { + return err + } + } + + for _, symlinkNode := range d.Symlinks { + symlinkName := symlinkNode.GetName() + + // check name for validity + if !isValidName(symlinkName) { + return fmt.Errorf("invalid name for SymlinkNode: %v", symlinkName) + } + + // ensure names are sorted + if err := insertIfGt(&lastSymlinkName, symlinkName); err != nil { + return err + } + + // add to seenNames + if err := insertOnce(symlinkName); err != nil { + return err + } + } + + return nil +} diff --git a/tvix/store/protos/castore.pb.go b/tvix/store/protos/castore.pb.go new file mode 100644 index 000000000000..fab95d2e2c4f --- /dev/null +++ b/tvix/store/protos/castore.pb.go @@ -0,0 +1,450 @@ +// SPDX-FileCopyrightText: edef <edef@unfathomable.blue> +// SPDX-License-Identifier: OSL-3.0 OR MIT OR 
Apache-2.0 + +// Code generated by protoc-gen-go. DO NOT EDIT. +// versions: +// protoc-gen-go v1.29.1 +// protoc (unknown) +// source: tvix/store/protos/castore.proto + +package storev1 + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + reflect "reflect" + sync "sync" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +// A Directory can contain Directory, File or Symlink nodes. +// Each of these nodes have a name attribute, which is the basename in that directory +// and node type specific attributes. +// The name attribute: +// - MUST not contain slashes or null bytes +// - MUST not be '.' or '..' +// - MUST be unique across all three lists +// +// Elements in each list need to be lexicographically ordered by the name +// attribute. +type Directory struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Directories []*DirectoryNode `protobuf:"bytes,1,rep,name=directories,proto3" json:"directories,omitempty"` + Files []*FileNode `protobuf:"bytes,2,rep,name=files,proto3" json:"files,omitempty"` + Symlinks []*SymlinkNode `protobuf:"bytes,3,rep,name=symlinks,proto3" json:"symlinks,omitempty"` +} + +func (x *Directory) Reset() { + *x = Directory{} + if protoimpl.UnsafeEnabled { + mi := &file_tvix_store_protos_castore_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Directory) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Directory) ProtoMessage() {} + +func (x *Directory) ProtoReflect() protoreflect.Message { + mi := &file_tvix_store_protos_castore_proto_msgTypes[0] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Directory.ProtoReflect.Descriptor instead. +func (*Directory) Descriptor() ([]byte, []int) { + return file_tvix_store_protos_castore_proto_rawDescGZIP(), []int{0} +} + +func (x *Directory) GetDirectories() []*DirectoryNode { + if x != nil { + return x.Directories + } + return nil +} + +func (x *Directory) GetFiles() []*FileNode { + if x != nil { + return x.Files + } + return nil +} + +func (x *Directory) GetSymlinks() []*SymlinkNode { + if x != nil { + return x.Symlinks + } + return nil +} + +// A DirectoryNode represents a directory in a Directory. +type DirectoryNode struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // The (base)name of the directory + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The blake3 hash of a Directory message, serialized in protobuf canonical form. + Digest []byte `protobuf:"bytes,2,opt,name=digest,proto3" json:"digest,omitempty"` + // Number of child elements in the Directory referred to by `digest`. + // Calculated by summing up the numbers of `directories`, `files` and + // `symlinks`, and for each directory, its size field. Used for inode + // number calculation. + // This field is precisely as verifiable as any other Merkle tree edge. + // Resolve `digest`, and you can compute it incrementally. 
Resolve the + // entire tree, and you can fully compute it from scratch. + // A credulous implementation won't reject an excessive size, but this is + // harmless: you'll have some ordinals without nodes. Undersizing is + // obvious and easy to reject: you won't have an ordinal for some nodes. + Size uint32 `protobuf:"varint,3,opt,name=size,proto3" json:"size,omitempty"` +} + +func (x *DirectoryNode) Reset() { + *x = DirectoryNode{} + if protoimpl.UnsafeEnabled { + mi := &file_tvix_store_protos_castore_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *DirectoryNode) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*DirectoryNode) ProtoMessage() {} + +func (x *DirectoryNode) ProtoReflect() protoreflect.Message { + mi := &file_tvix_store_protos_castore_proto_msgTypes[1] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use DirectoryNode.ProtoReflect.Descriptor instead. +func (*DirectoryNode) Descriptor() ([]byte, []int) { + return file_tvix_store_protos_castore_proto_rawDescGZIP(), []int{1} +} + +func (x *DirectoryNode) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *DirectoryNode) GetDigest() []byte { + if x != nil { + return x.Digest + } + return nil +} + +func (x *DirectoryNode) GetSize() uint32 { + if x != nil { + return x.Size + } + return 0 +} + +// A FileNode represents a regular or executable file in a Directory. +type FileNode struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // The (base)name of the file + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The blake3 digest of the file contents + Digest []byte `protobuf:"bytes,2,opt,name=digest,proto3" json:"digest,omitempty"` + // The file content size + Size uint32 `protobuf:"varint,3,opt,name=size,proto3" json:"size,omitempty"` + // Whether the file is executable + Executable bool `protobuf:"varint,4,opt,name=executable,proto3" json:"executable,omitempty"` +} + +func (x *FileNode) Reset() { + *x = FileNode{} + if protoimpl.UnsafeEnabled { + mi := &file_tvix_store_protos_castore_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *FileNode) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*FileNode) ProtoMessage() {} + +func (x *FileNode) ProtoReflect() protoreflect.Message { + mi := &file_tvix_store_protos_castore_proto_msgTypes[2] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use FileNode.ProtoReflect.Descriptor instead. 
+func (*FileNode) Descriptor() ([]byte, []int) { + return file_tvix_store_protos_castore_proto_rawDescGZIP(), []int{2} +} + +func (x *FileNode) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *FileNode) GetDigest() []byte { + if x != nil { + return x.Digest + } + return nil +} + +func (x *FileNode) GetSize() uint32 { + if x != nil { + return x.Size + } + return 0 +} + +func (x *FileNode) GetExecutable() bool { + if x != nil { + return x.Executable + } + return false +} + +// A SymlinkNode represents a symbolic link in a Directory. +type SymlinkNode struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // The (base)name of the symlink + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The target of the symlink. + Target string `protobuf:"bytes,2,opt,name=target,proto3" json:"target,omitempty"` +} + +func (x *SymlinkNode) Reset() { + *x = SymlinkNode{} + if protoimpl.UnsafeEnabled { + mi := &file_tvix_store_protos_castore_proto_msgTypes[3] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *SymlinkNode) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*SymlinkNode) ProtoMessage() {} + +func (x *SymlinkNode) ProtoReflect() protoreflect.Message { + mi := &file_tvix_store_protos_castore_proto_msgTypes[3] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use SymlinkNode.ProtoReflect.Descriptor instead. +func (*SymlinkNode) Descriptor() ([]byte, []int) { + return file_tvix_store_protos_castore_proto_rawDescGZIP(), []int{3} +} + +func (x *SymlinkNode) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *SymlinkNode) GetTarget() string { + if x != nil { + return x.Target + } + return "" +} + +var File_tvix_store_protos_castore_proto protoreflect.FileDescriptor + +var file_tvix_store_protos_castore_proto_rawDesc = []byte{ + 0x0a, 0x1f, 0x74, 0x76, 0x69, 0x78, 0x2f, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2f, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x73, 0x2f, 0x63, 0x61, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, + 0x6f, 0x12, 0x0d, 0x74, 0x76, 0x69, 0x78, 0x2e, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x76, 0x31, + 0x22, 0xb2, 0x01, 0x0a, 0x09, 0x44, 0x69, 0x72, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x79, 0x12, 0x3e, + 0x0a, 0x0b, 0x64, 0x69, 0x72, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x69, 0x65, 0x73, 0x18, 0x01, 0x20, + 0x03, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x74, 0x76, 0x69, 0x78, 0x2e, 0x73, 0x74, 0x6f, 0x72, 0x65, + 0x2e, 0x76, 0x31, 0x2e, 0x44, 0x69, 0x72, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x79, 0x4e, 0x6f, 0x64, + 0x65, 0x52, 0x0b, 0x64, 0x69, 0x72, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x69, 0x65, 0x73, 0x12, 0x2d, + 0x0a, 0x05, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x17, 0x2e, + 0x74, 0x76, 0x69, 0x78, 0x2e, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x46, 0x69, + 0x6c, 0x65, 0x4e, 0x6f, 0x64, 0x65, 0x52, 0x05, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x12, 0x36, 0x0a, + 0x08, 0x73, 0x79, 0x6d, 0x6c, 0x69, 0x6e, 0x6b, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, + 0x1a, 0x2e, 0x74, 0x76, 0x69, 0x78, 0x2e, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x76, 0x31, 0x2e, + 0x53, 0x79, 0x6d, 0x6c, 0x69, 0x6e, 0x6b, 0x4e, 0x6f, 0x64, 0x65, 0x52, 0x08, 0x73, 0x79, 0x6d, + 0x6c, 0x69, 0x6e, 0x6b, 0x73, 
0x22, 0x4f, 0x0a, 0x0d, 0x44, 0x69, 0x72, 0x65, 0x63, 0x74, 0x6f, + 0x72, 0x79, 0x4e, 0x6f, 0x64, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x16, 0x0a, 0x06, 0x64, 0x69, + 0x67, 0x65, 0x73, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x06, 0x64, 0x69, 0x67, 0x65, + 0x73, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x73, 0x69, 0x7a, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0d, + 0x52, 0x04, 0x73, 0x69, 0x7a, 0x65, 0x22, 0x6a, 0x0a, 0x08, 0x46, 0x69, 0x6c, 0x65, 0x4e, 0x6f, + 0x64, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x16, 0x0a, 0x06, 0x64, 0x69, 0x67, 0x65, 0x73, 0x74, + 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x06, 0x64, 0x69, 0x67, 0x65, 0x73, 0x74, 0x12, 0x12, + 0x0a, 0x04, 0x73, 0x69, 0x7a, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x04, 0x73, 0x69, + 0x7a, 0x65, 0x12, 0x1e, 0x0a, 0x0a, 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, 0x61, 0x62, 0x6c, 0x65, + 0x18, 0x04, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0a, 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, 0x61, 0x62, + 0x6c, 0x65, 0x22, 0x39, 0x0a, 0x0b, 0x53, 0x79, 0x6d, 0x6c, 0x69, 0x6e, 0x6b, 0x4e, 0x6f, 0x64, + 0x65, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x16, 0x0a, 0x06, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x18, + 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x42, 0x28, 0x5a, + 0x26, 0x63, 0x6f, 0x64, 0x65, 0x2e, 0x74, 0x76, 0x6c, 0x2e, 0x66, 0x79, 0x69, 0x2f, 0x74, 0x76, + 0x69, 0x78, 0x2f, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x73, 0x3b, + 0x73, 0x74, 0x6f, 0x72, 0x65, 0x76, 0x31, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, +} + +var ( + file_tvix_store_protos_castore_proto_rawDescOnce sync.Once + file_tvix_store_protos_castore_proto_rawDescData = file_tvix_store_protos_castore_proto_rawDesc +) + +func file_tvix_store_protos_castore_proto_rawDescGZIP() []byte { + file_tvix_store_protos_castore_proto_rawDescOnce.Do(func() { + file_tvix_store_protos_castore_proto_rawDescData = protoimpl.X.CompressGZIP(file_tvix_store_protos_castore_proto_rawDescData) + }) + return file_tvix_store_protos_castore_proto_rawDescData +} + +var file_tvix_store_protos_castore_proto_msgTypes = make([]protoimpl.MessageInfo, 4) +var file_tvix_store_protos_castore_proto_goTypes = []interface{}{ + (*Directory)(nil), // 0: tvix.store.v1.Directory + (*DirectoryNode)(nil), // 1: tvix.store.v1.DirectoryNode + (*FileNode)(nil), // 2: tvix.store.v1.FileNode + (*SymlinkNode)(nil), // 3: tvix.store.v1.SymlinkNode +} +var file_tvix_store_protos_castore_proto_depIdxs = []int32{ + 1, // 0: tvix.store.v1.Directory.directories:type_name -> tvix.store.v1.DirectoryNode + 2, // 1: tvix.store.v1.Directory.files:type_name -> tvix.store.v1.FileNode + 3, // 2: tvix.store.v1.Directory.symlinks:type_name -> tvix.store.v1.SymlinkNode + 3, // [3:3] is the sub-list for method output_type + 3, // [3:3] is the sub-list for method input_type + 3, // [3:3] is the sub-list for extension type_name + 3, // [3:3] is the sub-list for extension extendee + 0, // [0:3] is the sub-list for field type_name +} + +func init() { file_tvix_store_protos_castore_proto_init() } +func file_tvix_store_protos_castore_proto_init() { + if File_tvix_store_protos_castore_proto != nil { + return + } + if !protoimpl.UnsafeEnabled { + file_tvix_store_protos_castore_proto_msgTypes[0].Exporter = func(v 
interface{}, i int) interface{} { + switch v := v.(*Directory); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_tvix_store_protos_castore_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*DirectoryNode); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_tvix_store_protos_castore_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*FileNode); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_tvix_store_protos_castore_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*SymlinkNode); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_tvix_store_protos_castore_proto_rawDesc, + NumEnums: 0, + NumMessages: 4, + NumExtensions: 0, + NumServices: 0, + }, + GoTypes: file_tvix_store_protos_castore_proto_goTypes, + DependencyIndexes: file_tvix_store_protos_castore_proto_depIdxs, + MessageInfos: file_tvix_store_protos_castore_proto_msgTypes, + }.Build() + File_tvix_store_protos_castore_proto = out.File + file_tvix_store_protos_castore_proto_rawDesc = nil + file_tvix_store_protos_castore_proto_goTypes = nil + file_tvix_store_protos_castore_proto_depIdxs = nil +} diff --git a/tvix/store/protos/castore.proto b/tvix/store/protos/castore.proto new file mode 100644 index 000000000000..747aab08bdf4 --- /dev/null +++ b/tvix/store/protos/castore.proto @@ -0,0 +1,62 @@ +// SPDX-FileCopyrightText: edef <edef@unfathomable.blue> +// SPDX-License-Identifier: OSL-3.0 OR MIT OR Apache-2.0 + +syntax = "proto3"; + +package tvix.store.v1; + +option go_package = "code.tvl.fyi/tvix/store/protos;storev1"; + +// A Directory can contain Directory, File or Symlink nodes. +// Each of these nodes have a name attribute, which is the basename in that directory +// and node type specific attributes. +// The name attribute: +// - MUST not contain slashes or null bytes +// - MUST not be '.' or '..' +// - MUST be unique across all three lists +// Elements in each list need to be lexicographically ordered by the name +// attribute. +message Directory { + repeated DirectoryNode directories = 1; + repeated FileNode files = 2; + repeated SymlinkNode symlinks = 3; +} + +// A DirectoryNode represents a directory in a Directory. +message DirectoryNode { + // The (base)name of the directory + string name = 1; + // The blake3 hash of a Directory message, serialized in protobuf canonical form. + bytes digest = 2; + // Number of child elements in the Directory referred to by `digest`. + // Calculated by summing up the numbers of `directories`, `files` and + // `symlinks`, and for each directory, its size field. Used for inode + // number calculation. + // This field is precisely as verifiable as any other Merkle tree edge. + // Resolve `digest`, and you can compute it incrementally. Resolve the + // entire tree, and you can fully compute it from scratch. + // A credulous implementation won't reject an excessive size, but this is + // harmless: you'll have some ordinals without nodes. 
Undersizing is + // obvious and easy to reject: you won't have an ordinal for some nodes. + uint32 size = 3; +} + +// A FileNode represents a regular or executable file in a Directory. +message FileNode { + // The (base)name of the file + string name = 1; + // The blake3 digest of the file contents + bytes digest = 2; + // The file content size + uint32 size = 3; + // Whether the file is executable + bool executable = 4; +} + +// A SymlinkNode represents a symbolic link in a Directory. +message SymlinkNode { + // The (base)name of the symlink + string name = 1; + // The target of the symlink. + string target = 2; +} diff --git a/tvix/store/protos/castore_test.go b/tvix/store/protos/castore_test.go new file mode 100644 index 000000000000..61fe535366d0 --- /dev/null +++ b/tvix/store/protos/castore_test.go @@ -0,0 +1,271 @@ +package storev1_test + +import ( + "testing" + + storev1pb "code.tvl.fyi/tvix/store/protos" + "github.com/stretchr/testify/assert" +) + +var ( + dummyDigest = []byte{ + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, + } +) + +func TestDirectorySize(t *testing.T) { + t.Run("empty", func(t *testing.T) { + d := storev1pb.Directory{ + Directories: []*storev1pb.DirectoryNode{}, + Files: []*storev1pb.FileNode{}, + Symlinks: []*storev1pb.SymlinkNode{}, + } + + assert.Equal(t, uint32(0), d.Size()) + }) + + t.Run("containing single empty directory", func(t *testing.T) { + d := storev1pb.Directory{ + Directories: []*storev1pb.DirectoryNode{{ + Name: "foo", + Digest: dummyDigest, + Size: 0, + }}, + Files: []*storev1pb.FileNode{}, + Symlinks: []*storev1pb.SymlinkNode{}, + } + + assert.Equal(t, uint32(1), d.Size()) + }) + + t.Run("containing single non-empty directory", func(t *testing.T) { + d := storev1pb.Directory{ + Directories: []*storev1pb.DirectoryNode{{ + Name: "foo", + Digest: dummyDigest, + Size: 4, + }}, + Files: []*storev1pb.FileNode{}, + Symlinks: []*storev1pb.SymlinkNode{}, + } + + assert.Equal(t, uint32(5), d.Size()) + }) + + t.Run("containing single file", func(t *testing.T) { + d := storev1pb.Directory{ + Directories: []*storev1pb.DirectoryNode{}, + Files: []*storev1pb.FileNode{{ + Name: "foo", + Digest: dummyDigest, + Size: 42, + Executable: false, + }}, + Symlinks: []*storev1pb.SymlinkNode{}, + } + + assert.Equal(t, uint32(1), d.Size()) + }) + + t.Run("containing single symlink", func(t *testing.T) { + d := storev1pb.Directory{ + Directories: []*storev1pb.DirectoryNode{}, + Files: []*storev1pb.FileNode{}, + Symlinks: []*storev1pb.SymlinkNode{{ + Name: "foo", + Target: "bar", + }}, + } + + assert.Equal(t, uint32(1), d.Size()) + }) + +} +func TestDirectoryDigest(t *testing.T) { + d := storev1pb.Directory{ + Directories: []*storev1pb.DirectoryNode{}, + Files: []*storev1pb.FileNode{}, + Symlinks: []*storev1pb.SymlinkNode{}, + } + + dgst, err := d.Digest() + assert.NoError(t, err, "calling Digest() on a directory shouldn't error") + assert.Equal(t, []byte{ + 0xaf, 0x13, 0x49, 0xb9, 0xf5, 0xf9, 0xa1, 0xa6, 0xa0, 0x40, 0x4d, 0xea, 0x36, 0xdc, + 0xc9, 0x49, 0x9b, 0xcb, 0x25, 0xc9, 0xad, 0xc1, 0x12, 0xb7, 0xcc, 0x9a, 0x93, 0xca, + 0xe4, 0x1f, 0x32, 0x62, + }, dgst) +} + +func TestDirectoryValidate(t *testing.T) { + t.Run("empty", func(t *testing.T) { + d := storev1pb.Directory{ + Directories: []*storev1pb.DirectoryNode{}, + Files: []*storev1pb.FileNode{}, + Symlinks: []*storev1pb.SymlinkNode{}, + } + + assert.NoError(t, 
d.Validate()) + }) + + t.Run("invalid names", func(t *testing.T) { + { + d := storev1pb.Directory{ + Directories: []*storev1pb.DirectoryNode{{ + Name: "", + Digest: dummyDigest, + Size: 42, + }}, + Files: []*storev1pb.FileNode{}, + Symlinks: []*storev1pb.SymlinkNode{}, + } + + assert.ErrorContains(t, d.Validate(), "invalid name") + } + { + d := storev1pb.Directory{ + Directories: []*storev1pb.DirectoryNode{{ + Name: ".", + Digest: dummyDigest, + Size: 42, + }}, + Files: []*storev1pb.FileNode{}, + Symlinks: []*storev1pb.SymlinkNode{}, + } + + assert.ErrorContains(t, d.Validate(), "invalid name") + } + { + d := storev1pb.Directory{ + Directories: []*storev1pb.DirectoryNode{}, + Files: []*storev1pb.FileNode{{ + Name: "..", + Digest: dummyDigest, + Size: 42, + Executable: false, + }}, + Symlinks: []*storev1pb.SymlinkNode{}, + } + + assert.ErrorContains(t, d.Validate(), "invalid name") + } + { + d := storev1pb.Directory{ + Directories: []*storev1pb.DirectoryNode{}, + Files: []*storev1pb.FileNode{}, + Symlinks: []*storev1pb.SymlinkNode{{ + Name: "\x00", + Target: "foo", + }}, + } + + assert.ErrorContains(t, d.Validate(), "invalid name") + } + { + d := storev1pb.Directory{ + Directories: []*storev1pb.DirectoryNode{}, + Files: []*storev1pb.FileNode{}, + Symlinks: []*storev1pb.SymlinkNode{{ + Name: "foo/bar", + Target: "foo", + }}, + } + + assert.ErrorContains(t, d.Validate(), "invalid name") + } + }) + + t.Run("invalid digest", func(t *testing.T) { + d := storev1pb.Directory{ + Directories: []*storev1pb.DirectoryNode{{ + Name: "foo", + Digest: nil, + Size: 42, + }}, + Files: []*storev1pb.FileNode{}, + Symlinks: []*storev1pb.SymlinkNode{}, + } + + assert.ErrorContains(t, d.Validate(), "invalid digest length") + }) + + t.Run("sorting", func(t *testing.T) { + // "b" comes before "a", bad. + { + d := storev1pb.Directory{ + Directories: []*storev1pb.DirectoryNode{{ + Name: "b", + Digest: dummyDigest, + Size: 42, + }, { + Name: "a", + Digest: dummyDigest, + Size: 42, + }}, + Files: []*storev1pb.FileNode{}, + Symlinks: []*storev1pb.SymlinkNode{}, + } + assert.ErrorContains(t, d.Validate(), "is not in sorted order") + } + + // "a" exists twice, bad. + { + d := storev1pb.Directory{ + Directories: []*storev1pb.DirectoryNode{{ + Name: "a", + Digest: dummyDigest, + Size: 42, + }}, + Files: []*storev1pb.FileNode{{ + Name: "a", + Digest: dummyDigest, + Size: 42, + Executable: false, + }}, + Symlinks: []*storev1pb.SymlinkNode{}, + } + assert.ErrorContains(t, d.Validate(), "duplicate name") + } + + // "a" comes before "b", all good. + { + d := storev1pb.Directory{ + Directories: []*storev1pb.DirectoryNode{{ + Name: "a", + Digest: dummyDigest, + Size: 42, + }, { + Name: "b", + Digest: dummyDigest, + Size: 42, + }}, + Files: []*storev1pb.FileNode{}, + Symlinks: []*storev1pb.SymlinkNode{}, + } + assert.NoError(t, d.Validate(), "shouldn't error") + } + + // [b, c] and [a] are both properly sorted. + { + d := storev1pb.Directory{ + Directories: []*storev1pb.DirectoryNode{{ + Name: "b", + Digest: dummyDigest, + Size: 42, + }, { + Name: "c", + Digest: dummyDigest, + Size: 42, + }}, + Files: []*storev1pb.FileNode{}, + Symlinks: []*storev1pb.SymlinkNode{{ + Name: "a", + Target: "foo", + }}, + } + assert.NoError(t, d.Validate(), "shouldn't error") + } + }) +} diff --git a/tvix/store/protos/default.nix b/tvix/store/protos/default.nix new file mode 100644 index 000000000000..0ffdcac0418a --- /dev/null +++ b/tvix/store/protos/default.nix @@ -0,0 +1,12 @@ +# Target containing just the proto files. + +{ depot, lib, ... 
}: + +let + inherit (lib.strings) hasSuffix; + inherit (builtins) attrNames filter readDir; + + protoFileNames = filter (hasSuffix ".proto") (attrNames (readDir ./.)); + protoFiles = map (f: ./. + ("/" + f)) protoFileNames; +in +depot.nix.sparseTree depot.path.origSrc protoFiles diff --git a/tvix/store/protos/go.mod b/tvix/store/protos/go.mod new file mode 100644 index 000000000000..8a21555eaad5 --- /dev/null +++ b/tvix/store/protos/go.mod @@ -0,0 +1,19 @@ +module code.tvl.fyi/tvix/store/protos + +go 1.19 + +require ( + github.com/davecgh/go-spew v1.1.1 // indirect + github.com/golang/protobuf v1.5.2 // indirect + github.com/klauspost/cpuid/v2 v2.0.9 // indirect + github.com/pmezard/go-difflib v1.0.0 // indirect + github.com/stretchr/testify v1.8.1 // indirect + golang.org/x/net v0.0.0-20220722155237-a158d28d115b // indirect + golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f // indirect + golang.org/x/text v0.4.0 // indirect + google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013 // indirect + google.golang.org/grpc v1.51.0 // indirect + google.golang.org/protobuf v1.28.1 // indirect + gopkg.in/yaml.v3 v3.0.1 // indirect + lukechampine.com/blake3 v1.1.7 // indirect +) diff --git a/tvix/store/protos/go.sum b/tvix/store/protos/go.sum new file mode 100644 index 000000000000..7a603cdb120d --- /dev/null +++ b/tvix/store/protos/go.sum @@ -0,0 +1,96 @@ +cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= +github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= +github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= +github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= +github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= +github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= +github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8= +github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA= +github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs= +github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w= +github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0= +github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8= +github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= +github.com/golang/protobuf v1.5.2 h1:ROPKBNFfQgOUMifHyP+KYbvpjbdoFNs+aK7DXlji0Tw= +github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= +github.com/google/go-cmp 
v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= +github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/klauspost/cpuid/v2 v2.0.9 h1:lgaqFMSdTdQYdZ04uHyN2d/eKdOMyi2YLSvlQIBFYa4= +github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= +github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= +github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= +github.com/stretchr/testify v1.8.1 h1:w7B6lhMri9wdJUVmEZPGGhZzrYTPvgJArz7wNPgYKsk= +github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= +golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= +golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= +golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20220722155237-a158d28d115b h1:PxfKdU9lEEDYjdIzOtC4qFWgkU2rGHdKlKowJSMN9h0= +golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= +golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= +golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f h1:v4INt8xihDGvnrfjMDVXGxw9wrfxYyCjk0KbXjhR55s= +golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod 
h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.4.0 h1:BrVqGRd7+k1DiOgtnFvAkoQEWQvBc25ouMJM6429SFg= +golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= +golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= +golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= +google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= +google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= +google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013 h1:+kGHl1aib/qcwaRi1CbqBZ1rk19r85MNUf8HaBghugY= +google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= +google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= +google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= +google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.51.0 h1:E1eGv1FTqoLIdnBCZufiSHgKjlqG6fKFf6pPWtMTh8U= +google.golang.org/grpc v1.51.0/go.mod h1:wgNDFcnuBGmxLKI/qn4T+m5BtEBYXJPvibbUPsAIPww= +google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= +google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= +google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= +google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE= +google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo= +google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= +google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= +google.golang.org/protobuf v1.28.1 h1:d0NfwRgPtno5B1Wa6L2DAG+KivqkdutMf1UhdNx175w= +google.golang.org/protobuf v1.28.1/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools 
v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +lukechampine.com/blake3 v1.1.7 h1:GgRMhmdsuK8+ii6UZFDL8Nb+VyMwadAgcJyfYHxG6n0= +lukechampine.com/blake3 v1.1.7/go.mod h1:tkKEOtDkNtklkXtLNEOGNq5tcV90tJiA1vAA12R78LA= diff --git a/tvix/store/protos/pathinfo.pb.go b/tvix/store/protos/pathinfo.pb.go new file mode 100644 index 000000000000..f8c66014eaa7 --- /dev/null +++ b/tvix/store/protos/pathinfo.pb.go @@ -0,0 +1,498 @@ +// SPDX-License-Identifier: MIT +// Copyright © 2022 The Tvix Authors + +// Code generated by protoc-gen-go. DO NOT EDIT. +// versions: +// protoc-gen-go v1.29.1 +// protoc (unknown) +// source: tvix/store/protos/pathinfo.proto + +package storev1 + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + reflect "reflect" + sync "sync" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +// PathInfo shows information about a Nix Store Path. +// That's a single element inside /nix/store. +type PathInfo struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // The path can be a directory, file or symlink. + Node *Node `protobuf:"bytes,1,opt,name=node,proto3" json:"node,omitempty"` + // List of references (output path hashes) + // This really is the raw *bytes*, after decoding nixbase32, and not a + // base32-encoded string. + References [][]byte `protobuf:"bytes,2,rep,name=references,proto3" json:"references,omitempty"` + // see below. + Narinfo *NARInfo `protobuf:"bytes,3,opt,name=narinfo,proto3" json:"narinfo,omitempty"` +} + +func (x *PathInfo) Reset() { + *x = PathInfo{} + if protoimpl.UnsafeEnabled { + mi := &file_tvix_store_protos_pathinfo_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *PathInfo) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*PathInfo) ProtoMessage() {} + +func (x *PathInfo) ProtoReflect() protoreflect.Message { + mi := &file_tvix_store_protos_pathinfo_proto_msgTypes[0] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use PathInfo.ProtoReflect.Descriptor instead. 
+func (*PathInfo) Descriptor() ([]byte, []int) { + return file_tvix_store_protos_pathinfo_proto_rawDescGZIP(), []int{0} +} + +func (x *PathInfo) GetNode() *Node { + if x != nil { + return x.Node + } + return nil +} + +func (x *PathInfo) GetReferences() [][]byte { + if x != nil { + return x.References + } + return nil +} + +func (x *PathInfo) GetNarinfo() *NARInfo { + if x != nil { + return x.Narinfo + } + return nil +} + +type Node struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // Types that are assignable to Node: + // + // *Node_Directory + // *Node_File + // *Node_Symlink + Node isNode_Node `protobuf_oneof:"node"` +} + +func (x *Node) Reset() { + *x = Node{} + if protoimpl.UnsafeEnabled { + mi := &file_tvix_store_protos_pathinfo_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Node) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Node) ProtoMessage() {} + +func (x *Node) ProtoReflect() protoreflect.Message { + mi := &file_tvix_store_protos_pathinfo_proto_msgTypes[1] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Node.ProtoReflect.Descriptor instead. +func (*Node) Descriptor() ([]byte, []int) { + return file_tvix_store_protos_pathinfo_proto_rawDescGZIP(), []int{1} +} + +func (m *Node) GetNode() isNode_Node { + if m != nil { + return m.Node + } + return nil +} + +func (x *Node) GetDirectory() *DirectoryNode { + if x, ok := x.GetNode().(*Node_Directory); ok { + return x.Directory + } + return nil +} + +func (x *Node) GetFile() *FileNode { + if x, ok := x.GetNode().(*Node_File); ok { + return x.File + } + return nil +} + +func (x *Node) GetSymlink() *SymlinkNode { + if x, ok := x.GetNode().(*Node_Symlink); ok { + return x.Symlink + } + return nil +} + +type isNode_Node interface { + isNode_Node() +} + +type Node_Directory struct { + Directory *DirectoryNode `protobuf:"bytes,1,opt,name=directory,proto3,oneof"` +} + +type Node_File struct { + File *FileNode `protobuf:"bytes,2,opt,name=file,proto3,oneof"` +} + +type Node_Symlink struct { + Symlink *SymlinkNode `protobuf:"bytes,3,opt,name=symlink,proto3,oneof"` +} + +func (*Node_Directory) isNode_Node() {} + +func (*Node_File) isNode_Node() {} + +func (*Node_Symlink) isNode_Node() {} + +// Nix C++ uses NAR (Nix Archive) as a format to transfer store paths, +// and stores metadata and signatures in NARInfo files. +// Store all these attributes in a separate message. +// +// This is useful to render .narinfo files to clients, or to preserve/validate +// these signatures. +// As verifying these signatures requires the whole NAR file to be synthesized, +// moving to another signature scheme is desired. +// Even then, it still makes sense to hold this data, for old clients. +type NARInfo struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // This size of the NAR file, in bytes. + NarSize uint64 `protobuf:"varint,1,opt,name=nar_size,json=narSize,proto3" json:"nar_size,omitempty"` + // The sha256 of the NAR file representation. + NarSha256 []byte `protobuf:"bytes,2,opt,name=nar_sha256,json=narSha256,proto3" json:"nar_sha256,omitempty"` + // The signatures in a .narinfo file. 
+ Signatures []*NARInfo_Signature `protobuf:"bytes,3,rep,name=signatures,proto3" json:"signatures,omitempty"` + // A list of references. To validate .narinfo signatures, a fingerprint + // needs to be constructed. + // This fingerprint doesn't just contain the hashes of the output paths of + // all references (like PathInfo.references), but their whole (base)names, + // so we need to keep them somewhere. + ReferenceNames []string `protobuf:"bytes,4,rep,name=reference_names,json=referenceNames,proto3" json:"reference_names,omitempty"` +} + +func (x *NARInfo) Reset() { + *x = NARInfo{} + if protoimpl.UnsafeEnabled { + mi := &file_tvix_store_protos_pathinfo_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *NARInfo) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*NARInfo) ProtoMessage() {} + +func (x *NARInfo) ProtoReflect() protoreflect.Message { + mi := &file_tvix_store_protos_pathinfo_proto_msgTypes[2] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use NARInfo.ProtoReflect.Descriptor instead. +func (*NARInfo) Descriptor() ([]byte, []int) { + return file_tvix_store_protos_pathinfo_proto_rawDescGZIP(), []int{2} +} + +func (x *NARInfo) GetNarSize() uint64 { + if x != nil { + return x.NarSize + } + return 0 +} + +func (x *NARInfo) GetNarSha256() []byte { + if x != nil { + return x.NarSha256 + } + return nil +} + +func (x *NARInfo) GetSignatures() []*NARInfo_Signature { + if x != nil { + return x.Signatures + } + return nil +} + +func (x *NARInfo) GetReferenceNames() []string { + if x != nil { + return x.ReferenceNames + } + return nil +} + +// This represents a (parsed) signature line in a .narinfo file. +type NARInfo_Signature struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + Data []byte `protobuf:"bytes,2,opt,name=data,proto3" json:"data,omitempty"` +} + +func (x *NARInfo_Signature) Reset() { + *x = NARInfo_Signature{} + if protoimpl.UnsafeEnabled { + mi := &file_tvix_store_protos_pathinfo_proto_msgTypes[3] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *NARInfo_Signature) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*NARInfo_Signature) ProtoMessage() {} + +func (x *NARInfo_Signature) ProtoReflect() protoreflect.Message { + mi := &file_tvix_store_protos_pathinfo_proto_msgTypes[3] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use NARInfo_Signature.ProtoReflect.Descriptor instead. 
+func (*NARInfo_Signature) Descriptor() ([]byte, []int) { + return file_tvix_store_protos_pathinfo_proto_rawDescGZIP(), []int{2, 0} +} + +func (x *NARInfo_Signature) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *NARInfo_Signature) GetData() []byte { + if x != nil { + return x.Data + } + return nil +} + +var File_tvix_store_protos_pathinfo_proto protoreflect.FileDescriptor + +var file_tvix_store_protos_pathinfo_proto_rawDesc = []byte{ + 0x0a, 0x20, 0x74, 0x76, 0x69, 0x78, 0x2f, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2f, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x73, 0x2f, 0x70, 0x61, 0x74, 0x68, 0x69, 0x6e, 0x66, 0x6f, 0x2e, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x12, 0x0d, 0x74, 0x76, 0x69, 0x78, 0x2e, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x76, + 0x31, 0x1a, 0x1f, 0x74, 0x76, 0x69, 0x78, 0x2f, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2f, 0x70, 0x72, + 0x6f, 0x74, 0x6f, 0x73, 0x2f, 0x63, 0x61, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x22, 0x85, 0x01, 0x0a, 0x08, 0x50, 0x61, 0x74, 0x68, 0x49, 0x6e, 0x66, 0x6f, 0x12, + 0x27, 0x0a, 0x04, 0x6e, 0x6f, 0x64, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, + 0x74, 0x76, 0x69, 0x78, 0x2e, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x4e, 0x6f, + 0x64, 0x65, 0x52, 0x04, 0x6e, 0x6f, 0x64, 0x65, 0x12, 0x1e, 0x0a, 0x0a, 0x72, 0x65, 0x66, 0x65, + 0x72, 0x65, 0x6e, 0x63, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0c, 0x52, 0x0a, 0x72, 0x65, + 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x73, 0x12, 0x30, 0x0a, 0x07, 0x6e, 0x61, 0x72, 0x69, + 0x6e, 0x66, 0x6f, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x74, 0x76, 0x69, 0x78, + 0x2e, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x4e, 0x41, 0x52, 0x49, 0x6e, 0x66, + 0x6f, 0x52, 0x07, 0x6e, 0x61, 0x72, 0x69, 0x6e, 0x66, 0x6f, 0x22, 0xb3, 0x01, 0x0a, 0x04, 0x4e, + 0x6f, 0x64, 0x65, 0x12, 0x3c, 0x0a, 0x09, 0x64, 0x69, 0x72, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x79, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x74, 0x76, 0x69, 0x78, 0x2e, 0x73, 0x74, + 0x6f, 0x72, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x44, 0x69, 0x72, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x79, + 0x4e, 0x6f, 0x64, 0x65, 0x48, 0x00, 0x52, 0x09, 0x64, 0x69, 0x72, 0x65, 0x63, 0x74, 0x6f, 0x72, + 0x79, 0x12, 0x2d, 0x0a, 0x04, 0x66, 0x69, 0x6c, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x17, 0x2e, 0x74, 0x76, 0x69, 0x78, 0x2e, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x76, 0x31, 0x2e, + 0x46, 0x69, 0x6c, 0x65, 0x4e, 0x6f, 0x64, 0x65, 0x48, 0x00, 0x52, 0x04, 0x66, 0x69, 0x6c, 0x65, + 0x12, 0x36, 0x0a, 0x07, 0x73, 0x79, 0x6d, 0x6c, 0x69, 0x6e, 0x6b, 0x18, 0x03, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x1a, 0x2e, 0x74, 0x76, 0x69, 0x78, 0x2e, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x76, + 0x31, 0x2e, 0x53, 0x79, 0x6d, 0x6c, 0x69, 0x6e, 0x6b, 0x4e, 0x6f, 0x64, 0x65, 0x48, 0x00, 0x52, + 0x07, 0x73, 0x79, 0x6d, 0x6c, 0x69, 0x6e, 0x6b, 0x42, 0x06, 0x0a, 0x04, 0x6e, 0x6f, 0x64, 0x65, + 0x22, 0xe3, 0x01, 0x0a, 0x07, 0x4e, 0x41, 0x52, 0x49, 0x6e, 0x66, 0x6f, 0x12, 0x19, 0x0a, 0x08, + 0x6e, 0x61, 0x72, 0x5f, 0x73, 0x69, 0x7a, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x52, 0x07, + 0x6e, 0x61, 0x72, 0x53, 0x69, 0x7a, 0x65, 0x12, 0x1d, 0x0a, 0x0a, 0x6e, 0x61, 0x72, 0x5f, 0x73, + 0x68, 0x61, 0x32, 0x35, 0x36, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x09, 0x6e, 0x61, 0x72, + 0x53, 0x68, 0x61, 0x32, 0x35, 0x36, 0x12, 0x40, 0x0a, 0x0a, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, + 0x75, 0x72, 0x65, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x20, 0x2e, 0x74, 0x76, 0x69, + 0x78, 0x2e, 0x73, 0x74, 0x6f, 0x72, 
0x65, 0x2e, 0x76, 0x31, 0x2e, 0x4e, 0x41, 0x52, 0x49, 0x6e, + 0x66, 0x6f, 0x2e, 0x53, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x52, 0x0a, 0x73, 0x69, + 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x73, 0x12, 0x27, 0x0a, 0x0f, 0x72, 0x65, 0x66, 0x65, + 0x72, 0x65, 0x6e, 0x63, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, + 0x09, 0x52, 0x0e, 0x72, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x4e, 0x61, 0x6d, 0x65, + 0x73, 0x1a, 0x33, 0x0a, 0x09, 0x53, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x12, 0x12, + 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, + 0x6d, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x64, 0x61, 0x74, 0x61, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, + 0x52, 0x04, 0x64, 0x61, 0x74, 0x61, 0x42, 0x28, 0x5a, 0x26, 0x63, 0x6f, 0x64, 0x65, 0x2e, 0x74, + 0x76, 0x6c, 0x2e, 0x66, 0x79, 0x69, 0x2f, 0x74, 0x76, 0x69, 0x78, 0x2f, 0x73, 0x74, 0x6f, 0x72, + 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x73, 0x3b, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x76, 0x31, + 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, +} + +var ( + file_tvix_store_protos_pathinfo_proto_rawDescOnce sync.Once + file_tvix_store_protos_pathinfo_proto_rawDescData = file_tvix_store_protos_pathinfo_proto_rawDesc +) + +func file_tvix_store_protos_pathinfo_proto_rawDescGZIP() []byte { + file_tvix_store_protos_pathinfo_proto_rawDescOnce.Do(func() { + file_tvix_store_protos_pathinfo_proto_rawDescData = protoimpl.X.CompressGZIP(file_tvix_store_protos_pathinfo_proto_rawDescData) + }) + return file_tvix_store_protos_pathinfo_proto_rawDescData +} + +var file_tvix_store_protos_pathinfo_proto_msgTypes = make([]protoimpl.MessageInfo, 4) +var file_tvix_store_protos_pathinfo_proto_goTypes = []interface{}{ + (*PathInfo)(nil), // 0: tvix.store.v1.PathInfo + (*Node)(nil), // 1: tvix.store.v1.Node + (*NARInfo)(nil), // 2: tvix.store.v1.NARInfo + (*NARInfo_Signature)(nil), // 3: tvix.store.v1.NARInfo.Signature + (*DirectoryNode)(nil), // 4: tvix.store.v1.DirectoryNode + (*FileNode)(nil), // 5: tvix.store.v1.FileNode + (*SymlinkNode)(nil), // 6: tvix.store.v1.SymlinkNode +} +var file_tvix_store_protos_pathinfo_proto_depIdxs = []int32{ + 1, // 0: tvix.store.v1.PathInfo.node:type_name -> tvix.store.v1.Node + 2, // 1: tvix.store.v1.PathInfo.narinfo:type_name -> tvix.store.v1.NARInfo + 4, // 2: tvix.store.v1.Node.directory:type_name -> tvix.store.v1.DirectoryNode + 5, // 3: tvix.store.v1.Node.file:type_name -> tvix.store.v1.FileNode + 6, // 4: tvix.store.v1.Node.symlink:type_name -> tvix.store.v1.SymlinkNode + 3, // 5: tvix.store.v1.NARInfo.signatures:type_name -> tvix.store.v1.NARInfo.Signature + 6, // [6:6] is the sub-list for method output_type + 6, // [6:6] is the sub-list for method input_type + 6, // [6:6] is the sub-list for extension type_name + 6, // [6:6] is the sub-list for extension extendee + 0, // [0:6] is the sub-list for field type_name +} + +func init() { file_tvix_store_protos_pathinfo_proto_init() } +func file_tvix_store_protos_pathinfo_proto_init() { + if File_tvix_store_protos_pathinfo_proto != nil { + return + } + file_tvix_store_protos_castore_proto_init() + if !protoimpl.UnsafeEnabled { + file_tvix_store_protos_pathinfo_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*PathInfo); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_tvix_store_protos_pathinfo_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { + switch 
v := v.(*Node); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_tvix_store_protos_pathinfo_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} {
+ switch v := v.(*NARInfo); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_tvix_store_protos_pathinfo_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} {
+ switch v := v.(*NARInfo_Signature); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ }
+ file_tvix_store_protos_pathinfo_proto_msgTypes[1].OneofWrappers = []interface{}{
+ (*Node_Directory)(nil),
+ (*Node_File)(nil),
+ (*Node_Symlink)(nil),
+ }
+ type x struct{}
+ out := protoimpl.TypeBuilder{
+ File: protoimpl.DescBuilder{
+ GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
+ RawDescriptor: file_tvix_store_protos_pathinfo_proto_rawDesc,
+ NumEnums: 0,
+ NumMessages: 4,
+ NumExtensions: 0,
+ NumServices: 0,
+ },
+ GoTypes: file_tvix_store_protos_pathinfo_proto_goTypes,
+ DependencyIndexes: file_tvix_store_protos_pathinfo_proto_depIdxs,
+ MessageInfos: file_tvix_store_protos_pathinfo_proto_msgTypes,
+ }.Build()
+ File_tvix_store_protos_pathinfo_proto = out.File
+ file_tvix_store_protos_pathinfo_proto_rawDesc = nil
+ file_tvix_store_protos_pathinfo_proto_goTypes = nil
+ file_tvix_store_protos_pathinfo_proto_depIdxs = nil
+}
diff --git a/tvix/store/protos/pathinfo.proto b/tvix/store/protos/pathinfo.proto
new file mode 100644
index 000000000000..896d4aa225ac
--- /dev/null
+++ b/tvix/store/protos/pathinfo.proto
@@ -0,0 +1,66 @@
+// SPDX-License-Identifier: MIT
+// Copyright © 2022 The Tvix Authors
+syntax = "proto3";
+
+package tvix.store.v1;
+
+import "tvix/store/protos/castore.proto";
+
+option go_package = "code.tvl.fyi/tvix/store/protos;storev1";
+
+// PathInfo shows information about a Nix Store Path.
+// That's a single element inside /nix/store.
+message PathInfo {
+ // The path can be a directory, file or symlink.
+ Node node = 1;
+
+ // List of references (output path hashes)
+ // This really is the raw *bytes*, after decoding nixbase32, and not a
+ // base32-encoded string.
+ repeated bytes references = 2;
+
+ // see below.
+ NARInfo narinfo = 3;
+}
+
+message Node {
+ oneof node {
+ DirectoryNode directory = 1;
+ FileNode file = 2;
+ SymlinkNode symlink = 3;
+ }
+}
+
+// Nix C++ uses NAR (Nix Archive) as a format to transfer store paths,
+// and stores metadata and signatures in NARInfo files.
+// Store all these attributes in a separate message.
+//
+// This is useful to render .narinfo files to clients, or to preserve/validate
+// these signatures.
+// As verifying these signatures requires the whole NAR file to be synthesized,
+// moving to another signature scheme is desired.
+// Even then, it still makes sense to hold this data, for old clients.
+message NARInfo {
+ // This represents a (parsed) signature line in a .narinfo file.
+ message Signature {
+ string name = 1;
+ bytes data = 2;
+ };
+
+ // The size of the NAR file, in bytes.
+ uint64 nar_size = 1;
+
+ // The sha256 of the NAR file representation.
+ bytes nar_sha256 = 2;
+
+ // The signatures in a .narinfo file.
+ repeated Signature signatures = 3;
+
+ // A list of references. To validate .narinfo signatures, a fingerprint
+ // needs to be constructed.
+ // This fingerprint doesn't just contain the hashes of the output paths of + // all references (like PathInfo.references), but their whole (base)names, + // so we need to keep them somewhere. + repeated string reference_names = 4; + +} diff --git a/tvix/store/protos/rpc_blobstore.pb.go b/tvix/store/protos/rpc_blobstore.pb.go new file mode 100644 index 000000000000..850716757668 --- /dev/null +++ b/tvix/store/protos/rpc_blobstore.pb.go @@ -0,0 +1,542 @@ +// SPDX-License-Identifier: MIT +// Copyright © 2022 The Tvix Authors + +// Code generated by protoc-gen-go. DO NOT EDIT. +// versions: +// protoc-gen-go v1.29.1 +// protoc (unknown) +// source: tvix/store/protos/rpc_blobstore.proto + +package storev1 + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + reflect "reflect" + sync "sync" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +type StatBlobRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // The blake3 digest of the blob requested + Digest []byte `protobuf:"bytes,1,opt,name=digest,proto3" json:"digest,omitempty"` + // Whether to include the chunks field + IncludeChunks bool `protobuf:"varint,2,opt,name=include_chunks,json=includeChunks,proto3" json:"include_chunks,omitempty"` + // Whether to include the inline_bao field, containing an (outboard) bao. + // The [bao](https://github.com/oconnor663/bao/blob/master/docs/spec.md) + // can be used to validate chunks end up hashing to the same root digest. + // These only really matter when only downloading parts of a blob. Some + // caution needs to be applied when validating chunks - the bao works with + // 1K leaf nodes, which might not align with the chunk sizes - this might + // imply a neighboring chunk might need to be (partially) fetched to + // validate the hash. + IncludeBao bool `protobuf:"varint,3,opt,name=include_bao,json=includeBao,proto3" json:"include_bao,omitempty"` +} + +func (x *StatBlobRequest) Reset() { + *x = StatBlobRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_tvix_store_protos_rpc_blobstore_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *StatBlobRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*StatBlobRequest) ProtoMessage() {} + +func (x *StatBlobRequest) ProtoReflect() protoreflect.Message { + mi := &file_tvix_store_protos_rpc_blobstore_proto_msgTypes[0] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use StatBlobRequest.ProtoReflect.Descriptor instead. 
+func (*StatBlobRequest) Descriptor() ([]byte, []int) { + return file_tvix_store_protos_rpc_blobstore_proto_rawDescGZIP(), []int{0} +} + +func (x *StatBlobRequest) GetDigest() []byte { + if x != nil { + return x.Digest + } + return nil +} + +func (x *StatBlobRequest) GetIncludeChunks() bool { + if x != nil { + return x.IncludeChunks + } + return false +} + +func (x *StatBlobRequest) GetIncludeBao() bool { + if x != nil { + return x.IncludeBao + } + return false +} + +// BlobMeta provides more granular chunking information for the requested blob, +// and baos. +type BlobMeta struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // This provides a list of chunks. + // Concatenating their contents would produce a blob with the digest that + // was specified in the request. + Chunks []*BlobMeta_ChunkMeta `protobuf:"bytes,1,rep,name=chunks,proto3" json:"chunks,omitempty"` + InlineBao []byte `protobuf:"bytes,2,opt,name=inline_bao,json=inlineBao,proto3" json:"inline_bao,omitempty"` +} + +func (x *BlobMeta) Reset() { + *x = BlobMeta{} + if protoimpl.UnsafeEnabled { + mi := &file_tvix_store_protos_rpc_blobstore_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *BlobMeta) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*BlobMeta) ProtoMessage() {} + +func (x *BlobMeta) ProtoReflect() protoreflect.Message { + mi := &file_tvix_store_protos_rpc_blobstore_proto_msgTypes[1] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use BlobMeta.ProtoReflect.Descriptor instead. +func (*BlobMeta) Descriptor() ([]byte, []int) { + return file_tvix_store_protos_rpc_blobstore_proto_rawDescGZIP(), []int{1} +} + +func (x *BlobMeta) GetChunks() []*BlobMeta_ChunkMeta { + if x != nil { + return x.Chunks + } + return nil +} + +func (x *BlobMeta) GetInlineBao() []byte { + if x != nil { + return x.InlineBao + } + return nil +} + +type ReadBlobRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // The blake3 digest of the blob or chunk requested + Digest []byte `protobuf:"bytes,1,opt,name=digest,proto3" json:"digest,omitempty"` +} + +func (x *ReadBlobRequest) Reset() { + *x = ReadBlobRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_tvix_store_protos_rpc_blobstore_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ReadBlobRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ReadBlobRequest) ProtoMessage() {} + +func (x *ReadBlobRequest) ProtoReflect() protoreflect.Message { + mi := &file_tvix_store_protos_rpc_blobstore_proto_msgTypes[2] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ReadBlobRequest.ProtoReflect.Descriptor instead. +func (*ReadBlobRequest) Descriptor() ([]byte, []int) { + return file_tvix_store_protos_rpc_blobstore_proto_rawDescGZIP(), []int{2} +} + +func (x *ReadBlobRequest) GetDigest() []byte { + if x != nil { + return x.Digest + } + return nil +} + +// This represents some bytes of a blob. 
+// Blobs are sent in smaller chunks to keep message sizes manageable. +type BlobChunk struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Data []byte `protobuf:"bytes,1,opt,name=data,proto3" json:"data,omitempty"` +} + +func (x *BlobChunk) Reset() { + *x = BlobChunk{} + if protoimpl.UnsafeEnabled { + mi := &file_tvix_store_protos_rpc_blobstore_proto_msgTypes[3] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *BlobChunk) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*BlobChunk) ProtoMessage() {} + +func (x *BlobChunk) ProtoReflect() protoreflect.Message { + mi := &file_tvix_store_protos_rpc_blobstore_proto_msgTypes[3] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use BlobChunk.ProtoReflect.Descriptor instead. +func (*BlobChunk) Descriptor() ([]byte, []int) { + return file_tvix_store_protos_rpc_blobstore_proto_rawDescGZIP(), []int{3} +} + +func (x *BlobChunk) GetData() []byte { + if x != nil { + return x.Data + } + return nil +} + +type PutBlobResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // The blake3 digest of the data that was sent. + Digest []byte `protobuf:"bytes,1,opt,name=digest,proto3" json:"digest,omitempty"` +} + +func (x *PutBlobResponse) Reset() { + *x = PutBlobResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_tvix_store_protos_rpc_blobstore_proto_msgTypes[4] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *PutBlobResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*PutBlobResponse) ProtoMessage() {} + +func (x *PutBlobResponse) ProtoReflect() protoreflect.Message { + mi := &file_tvix_store_protos_rpc_blobstore_proto_msgTypes[4] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use PutBlobResponse.ProtoReflect.Descriptor instead. 
+func (*PutBlobResponse) Descriptor() ([]byte, []int) { + return file_tvix_store_protos_rpc_blobstore_proto_rawDescGZIP(), []int{4} +} + +func (x *PutBlobResponse) GetDigest() []byte { + if x != nil { + return x.Digest + } + return nil +} + +type BlobMeta_ChunkMeta struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Digest []byte `protobuf:"bytes,1,opt,name=digest,proto3" json:"digest,omitempty"` + Size uint32 `protobuf:"varint,2,opt,name=size,proto3" json:"size,omitempty"` +} + +func (x *BlobMeta_ChunkMeta) Reset() { + *x = BlobMeta_ChunkMeta{} + if protoimpl.UnsafeEnabled { + mi := &file_tvix_store_protos_rpc_blobstore_proto_msgTypes[5] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *BlobMeta_ChunkMeta) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*BlobMeta_ChunkMeta) ProtoMessage() {} + +func (x *BlobMeta_ChunkMeta) ProtoReflect() protoreflect.Message { + mi := &file_tvix_store_protos_rpc_blobstore_proto_msgTypes[5] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use BlobMeta_ChunkMeta.ProtoReflect.Descriptor instead. +func (*BlobMeta_ChunkMeta) Descriptor() ([]byte, []int) { + return file_tvix_store_protos_rpc_blobstore_proto_rawDescGZIP(), []int{1, 0} +} + +func (x *BlobMeta_ChunkMeta) GetDigest() []byte { + if x != nil { + return x.Digest + } + return nil +} + +func (x *BlobMeta_ChunkMeta) GetSize() uint32 { + if x != nil { + return x.Size + } + return 0 +} + +var File_tvix_store_protos_rpc_blobstore_proto protoreflect.FileDescriptor + +var file_tvix_store_protos_rpc_blobstore_proto_rawDesc = []byte{ + 0x0a, 0x25, 0x74, 0x76, 0x69, 0x78, 0x2f, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2f, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x73, 0x2f, 0x72, 0x70, 0x63, 0x5f, 0x62, 0x6c, 0x6f, 0x62, 0x73, 0x74, 0x6f, 0x72, + 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x0d, 0x74, 0x76, 0x69, 0x78, 0x2e, 0x73, 0x74, + 0x6f, 0x72, 0x65, 0x2e, 0x76, 0x31, 0x22, 0x71, 0x0a, 0x0f, 0x53, 0x74, 0x61, 0x74, 0x42, 0x6c, + 0x6f, 0x62, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x16, 0x0a, 0x06, 0x64, 0x69, 0x67, + 0x65, 0x73, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x06, 0x64, 0x69, 0x67, 0x65, 0x73, + 0x74, 0x12, 0x25, 0x0a, 0x0e, 0x69, 0x6e, 0x63, 0x6c, 0x75, 0x64, 0x65, 0x5f, 0x63, 0x68, 0x75, + 0x6e, 0x6b, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0d, 0x69, 0x6e, 0x63, 0x6c, 0x75, + 0x64, 0x65, 0x43, 0x68, 0x75, 0x6e, 0x6b, 0x73, 0x12, 0x1f, 0x0a, 0x0b, 0x69, 0x6e, 0x63, 0x6c, + 0x75, 0x64, 0x65, 0x5f, 0x62, 0x61, 0x6f, 0x18, 0x03, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0a, 0x69, + 0x6e, 0x63, 0x6c, 0x75, 0x64, 0x65, 0x42, 0x61, 0x6f, 0x22, 0x9d, 0x01, 0x0a, 0x08, 0x42, 0x6c, + 0x6f, 0x62, 0x4d, 0x65, 0x74, 0x61, 0x12, 0x39, 0x0a, 0x06, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x73, + 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x74, 0x76, 0x69, 0x78, 0x2e, 0x73, 0x74, + 0x6f, 0x72, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x42, 0x6c, 0x6f, 0x62, 0x4d, 0x65, 0x74, 0x61, 0x2e, + 0x43, 0x68, 0x75, 0x6e, 0x6b, 0x4d, 0x65, 0x74, 0x61, 0x52, 0x06, 0x63, 0x68, 0x75, 0x6e, 0x6b, + 0x73, 0x12, 0x1d, 0x0a, 0x0a, 0x69, 0x6e, 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x62, 0x61, 0x6f, 0x18, + 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x09, 0x69, 0x6e, 0x6c, 0x69, 0x6e, 0x65, 0x42, 0x61, 0x6f, + 0x1a, 0x37, 0x0a, 0x09, 0x43, 
0x68, 0x75, 0x6e, 0x6b, 0x4d, 0x65, 0x74, 0x61, 0x12, 0x16, 0x0a, + 0x06, 0x64, 0x69, 0x67, 0x65, 0x73, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x06, 0x64, + 0x69, 0x67, 0x65, 0x73, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x73, 0x69, 0x7a, 0x65, 0x18, 0x02, 0x20, + 0x01, 0x28, 0x0d, 0x52, 0x04, 0x73, 0x69, 0x7a, 0x65, 0x22, 0x29, 0x0a, 0x0f, 0x52, 0x65, 0x61, + 0x64, 0x42, 0x6c, 0x6f, 0x62, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x16, 0x0a, 0x06, + 0x64, 0x69, 0x67, 0x65, 0x73, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x06, 0x64, 0x69, + 0x67, 0x65, 0x73, 0x74, 0x22, 0x1f, 0x0a, 0x09, 0x42, 0x6c, 0x6f, 0x62, 0x43, 0x68, 0x75, 0x6e, + 0x6b, 0x12, 0x12, 0x0a, 0x04, 0x64, 0x61, 0x74, 0x61, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, + 0x04, 0x64, 0x61, 0x74, 0x61, 0x22, 0x29, 0x0a, 0x0f, 0x50, 0x75, 0x74, 0x42, 0x6c, 0x6f, 0x62, + 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x16, 0x0a, 0x06, 0x64, 0x69, 0x67, 0x65, + 0x73, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x06, 0x64, 0x69, 0x67, 0x65, 0x73, 0x74, + 0x32, 0xd5, 0x01, 0x0a, 0x0b, 0x42, 0x6c, 0x6f, 0x62, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, + 0x12, 0x3f, 0x0a, 0x04, 0x53, 0x74, 0x61, 0x74, 0x12, 0x1e, 0x2e, 0x74, 0x76, 0x69, 0x78, 0x2e, + 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x42, 0x6c, 0x6f, + 0x62, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x17, 0x2e, 0x74, 0x76, 0x69, 0x78, 0x2e, + 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x42, 0x6c, 0x6f, 0x62, 0x4d, 0x65, 0x74, + 0x61, 0x12, 0x42, 0x0a, 0x04, 0x52, 0x65, 0x61, 0x64, 0x12, 0x1e, 0x2e, 0x74, 0x76, 0x69, 0x78, + 0x2e, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x52, 0x65, 0x61, 0x64, 0x42, 0x6c, + 0x6f, 0x62, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x18, 0x2e, 0x74, 0x76, 0x69, 0x78, + 0x2e, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x42, 0x6c, 0x6f, 0x62, 0x43, 0x68, + 0x75, 0x6e, 0x6b, 0x30, 0x01, 0x12, 0x41, 0x0a, 0x03, 0x50, 0x75, 0x74, 0x12, 0x18, 0x2e, 0x74, + 0x76, 0x69, 0x78, 0x2e, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x42, 0x6c, 0x6f, + 0x62, 0x43, 0x68, 0x75, 0x6e, 0x6b, 0x1a, 0x1e, 0x2e, 0x74, 0x76, 0x69, 0x78, 0x2e, 0x73, 0x74, + 0x6f, 0x72, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x50, 0x75, 0x74, 0x42, 0x6c, 0x6f, 0x62, 0x52, 0x65, + 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x28, 0x01, 0x42, 0x28, 0x5a, 0x26, 0x63, 0x6f, 0x64, 0x65, + 0x2e, 0x74, 0x76, 0x6c, 0x2e, 0x66, 0x79, 0x69, 0x2f, 0x74, 0x76, 0x69, 0x78, 0x2f, 0x73, 0x74, + 0x6f, 0x72, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x73, 0x3b, 0x73, 0x74, 0x6f, 0x72, 0x65, + 0x76, 0x31, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, +} + +var ( + file_tvix_store_protos_rpc_blobstore_proto_rawDescOnce sync.Once + file_tvix_store_protos_rpc_blobstore_proto_rawDescData = file_tvix_store_protos_rpc_blobstore_proto_rawDesc +) + +func file_tvix_store_protos_rpc_blobstore_proto_rawDescGZIP() []byte { + file_tvix_store_protos_rpc_blobstore_proto_rawDescOnce.Do(func() { + file_tvix_store_protos_rpc_blobstore_proto_rawDescData = protoimpl.X.CompressGZIP(file_tvix_store_protos_rpc_blobstore_proto_rawDescData) + }) + return file_tvix_store_protos_rpc_blobstore_proto_rawDescData +} + +var file_tvix_store_protos_rpc_blobstore_proto_msgTypes = make([]protoimpl.MessageInfo, 6) +var file_tvix_store_protos_rpc_blobstore_proto_goTypes = []interface{}{ + (*StatBlobRequest)(nil), // 0: tvix.store.v1.StatBlobRequest + (*BlobMeta)(nil), // 1: tvix.store.v1.BlobMeta + (*ReadBlobRequest)(nil), // 2: 
tvix.store.v1.ReadBlobRequest + (*BlobChunk)(nil), // 3: tvix.store.v1.BlobChunk + (*PutBlobResponse)(nil), // 4: tvix.store.v1.PutBlobResponse + (*BlobMeta_ChunkMeta)(nil), // 5: tvix.store.v1.BlobMeta.ChunkMeta +} +var file_tvix_store_protos_rpc_blobstore_proto_depIdxs = []int32{ + 5, // 0: tvix.store.v1.BlobMeta.chunks:type_name -> tvix.store.v1.BlobMeta.ChunkMeta + 0, // 1: tvix.store.v1.BlobService.Stat:input_type -> tvix.store.v1.StatBlobRequest + 2, // 2: tvix.store.v1.BlobService.Read:input_type -> tvix.store.v1.ReadBlobRequest + 3, // 3: tvix.store.v1.BlobService.Put:input_type -> tvix.store.v1.BlobChunk + 1, // 4: tvix.store.v1.BlobService.Stat:output_type -> tvix.store.v1.BlobMeta + 3, // 5: tvix.store.v1.BlobService.Read:output_type -> tvix.store.v1.BlobChunk + 4, // 6: tvix.store.v1.BlobService.Put:output_type -> tvix.store.v1.PutBlobResponse + 4, // [4:7] is the sub-list for method output_type + 1, // [1:4] is the sub-list for method input_type + 1, // [1:1] is the sub-list for extension type_name + 1, // [1:1] is the sub-list for extension extendee + 0, // [0:1] is the sub-list for field type_name +} + +func init() { file_tvix_store_protos_rpc_blobstore_proto_init() } +func file_tvix_store_protos_rpc_blobstore_proto_init() { + if File_tvix_store_protos_rpc_blobstore_proto != nil { + return + } + if !protoimpl.UnsafeEnabled { + file_tvix_store_protos_rpc_blobstore_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*StatBlobRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_tvix_store_protos_rpc_blobstore_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*BlobMeta); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_tvix_store_protos_rpc_blobstore_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ReadBlobRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_tvix_store_protos_rpc_blobstore_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*BlobChunk); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_tvix_store_protos_rpc_blobstore_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*PutBlobResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_tvix_store_protos_rpc_blobstore_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*BlobMeta_ChunkMeta); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_tvix_store_protos_rpc_blobstore_proto_rawDesc, + NumEnums: 0, + NumMessages: 6, + NumExtensions: 0, + NumServices: 1, + }, + GoTypes: file_tvix_store_protos_rpc_blobstore_proto_goTypes, + DependencyIndexes: file_tvix_store_protos_rpc_blobstore_proto_depIdxs, + MessageInfos: file_tvix_store_protos_rpc_blobstore_proto_msgTypes, + }.Build() + File_tvix_store_protos_rpc_blobstore_proto 
= out.File
+ file_tvix_store_protos_rpc_blobstore_proto_rawDesc = nil
+ file_tvix_store_protos_rpc_blobstore_proto_goTypes = nil
+ file_tvix_store_protos_rpc_blobstore_proto_depIdxs = nil
+}
diff --git a/tvix/store/protos/rpc_blobstore.proto b/tvix/store/protos/rpc_blobstore.proto
new file mode 100644
index 000000000000..a7d48cf7709c
--- /dev/null
+++ b/tvix/store/protos/rpc_blobstore.proto
@@ -0,0 +1,80 @@
+// SPDX-License-Identifier: MIT
+// Copyright © 2022 The Tvix Authors
+syntax = "proto3";
+
+package tvix.store.v1;
+
+option go_package = "code.tvl.fyi/tvix/store/protos;storev1";
+
+service BlobService {
+ // Stat exposes metadata about a given blob,
+ // such as more granular chunking, baos.
+ // It implicitly allows checking for existence too, as asking this for a
+ // non-existing Blob will return a Status::not_found grpc error.
+ // If there's no more granular chunking available, the response will simply
+ // contain a single chunk.
+ rpc Stat(StatBlobRequest) returns (BlobMeta);
+
+ // Read returns a stream of BlobChunk, which is just a stream of bytes with
+ // the digest specified in ReadBlobRequest.
+ //
+ // The server may decide on whatever chunk size it sees fit for
+ // the individual BlobChunk sent in the response stream.
+ //
+ rpc Read(ReadBlobRequest) returns (stream BlobChunk);
+
+ // Put uploads a Blob, by reading a stream of bytes.
+ //
+ // The way the data is chunked up in individual BlobChunk messages sent in
+ // the stream has no effect on how the server ends up chunking blobs up.
+ rpc Put(stream BlobChunk) returns (PutBlobResponse);
+}
+
+message StatBlobRequest {
+ // The blake3 digest of the blob requested
+ bytes digest = 1;
+
+ // Whether to include the chunks field
+ bool include_chunks = 2;
+ // Whether to include the inline_bao field, containing an (outboard) bao.
+ // The [bao](https://github.com/oconnor663/bao/blob/master/docs/spec.md)
+ // can be used to validate chunks end up hashing to the same root digest.
+ // These only really matter when only downloading parts of a blob. Some
+ // caution needs to be applied when validating chunks - the bao works with
+ // 1K leaf nodes, which might not align with the chunk sizes - this might
+ // imply a neighboring chunk might need to be (partially) fetched to
+ // validate the hash.
+ bool include_bao = 3;
+}
+
+// BlobMeta provides more granular chunking information for the requested blob,
+// and baos.
+message BlobMeta {
+ // This provides a list of chunks.
+ // Concatenating their contents would produce a blob with the digest that
+ // was specified in the request.
+ repeated ChunkMeta chunks = 1;
+
+ message ChunkMeta {
+ bytes digest = 1;
+ uint32 size = 2;
+ }
+
+ bytes inline_bao = 2;
+}
+
+message ReadBlobRequest {
+ // The blake3 digest of the blob or chunk requested
+ bytes digest = 1;
+}
+
+// This represents some bytes of a blob.
+// Blobs are sent in smaller chunks to keep message sizes manageable.
+message BlobChunk {
+ bytes data = 1;
+}
+
+message PutBlobResponse {
+ // The blake3 digest of the data that was sent.
+ bytes digest = 1;
+}
diff --git a/tvix/store/protos/rpc_blobstore_grpc.pb.go b/tvix/store/protos/rpc_blobstore_grpc.pb.go
new file mode 100644
index 000000000000..3842ea288544
--- /dev/null
+++ b/tvix/store/protos/rpc_blobstore_grpc.pb.go
@@ -0,0 +1,278 @@
+// SPDX-License-Identifier: MIT
+// Copyright © 2022 The Tvix Authors
+
+// Code generated by protoc-gen-go-grpc. DO NOT EDIT.
+// versions: +// - protoc-gen-go-grpc v1.3.0 +// - protoc (unknown) +// source: tvix/store/protos/rpc_blobstore.proto + +package storev1 + +import ( + context "context" + grpc "google.golang.org/grpc" + codes "google.golang.org/grpc/codes" + status "google.golang.org/grpc/status" +) + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +// Requires gRPC-Go v1.32.0 or later. +const _ = grpc.SupportPackageIsVersion7 + +const ( + BlobService_Stat_FullMethodName = "/tvix.store.v1.BlobService/Stat" + BlobService_Read_FullMethodName = "/tvix.store.v1.BlobService/Read" + BlobService_Put_FullMethodName = "/tvix.store.v1.BlobService/Put" +) + +// BlobServiceClient is the client API for BlobService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream. +type BlobServiceClient interface { + // Stat exposes metadata about a given blob, + // such as more granular chunking, baos. + // It implicitly allows checking for existence too, as asking this for a + // non-existing Blob will return a Status::not_found grpc error. + // If there's no more granular chunking available, the response will simply + // contain a single chunk. + Stat(ctx context.Context, in *StatBlobRequest, opts ...grpc.CallOption) (*BlobMeta, error) + // Read returns a stream of BlobChunk, which is just a stream of bytes with + // the digest specified in ReadBlobRequest. + // + // The server may decide on whatever chunking it may seem fit as a size for + // the individual BlobChunk sent in the response stream. + Read(ctx context.Context, in *ReadBlobRequest, opts ...grpc.CallOption) (BlobService_ReadClient, error) + // Put uploads a Blob, by reading a stream of bytes. + // + // The way the data is chunked up in individual BlobChunk messages sent in + // the stream has no effect on how the server ends up chunking blobs up. + Put(ctx context.Context, opts ...grpc.CallOption) (BlobService_PutClient, error) +} + +type blobServiceClient struct { + cc grpc.ClientConnInterface +} + +func NewBlobServiceClient(cc grpc.ClientConnInterface) BlobServiceClient { + return &blobServiceClient{cc} +} + +func (c *blobServiceClient) Stat(ctx context.Context, in *StatBlobRequest, opts ...grpc.CallOption) (*BlobMeta, error) { + out := new(BlobMeta) + err := c.cc.Invoke(ctx, BlobService_Stat_FullMethodName, in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *blobServiceClient) Read(ctx context.Context, in *ReadBlobRequest, opts ...grpc.CallOption) (BlobService_ReadClient, error) { + stream, err := c.cc.NewStream(ctx, &BlobService_ServiceDesc.Streams[0], BlobService_Read_FullMethodName, opts...) 
+ if err != nil { + return nil, err + } + x := &blobServiceReadClient{stream} + if err := x.ClientStream.SendMsg(in); err != nil { + return nil, err + } + if err := x.ClientStream.CloseSend(); err != nil { + return nil, err + } + return x, nil +} + +type BlobService_ReadClient interface { + Recv() (*BlobChunk, error) + grpc.ClientStream +} + +type blobServiceReadClient struct { + grpc.ClientStream +} + +func (x *blobServiceReadClient) Recv() (*BlobChunk, error) { + m := new(BlobChunk) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +func (c *blobServiceClient) Put(ctx context.Context, opts ...grpc.CallOption) (BlobService_PutClient, error) { + stream, err := c.cc.NewStream(ctx, &BlobService_ServiceDesc.Streams[1], BlobService_Put_FullMethodName, opts...) + if err != nil { + return nil, err + } + x := &blobServicePutClient{stream} + return x, nil +} + +type BlobService_PutClient interface { + Send(*BlobChunk) error + CloseAndRecv() (*PutBlobResponse, error) + grpc.ClientStream +} + +type blobServicePutClient struct { + grpc.ClientStream +} + +func (x *blobServicePutClient) Send(m *BlobChunk) error { + return x.ClientStream.SendMsg(m) +} + +func (x *blobServicePutClient) CloseAndRecv() (*PutBlobResponse, error) { + if err := x.ClientStream.CloseSend(); err != nil { + return nil, err + } + m := new(PutBlobResponse) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +// BlobServiceServer is the server API for BlobService service. +// All implementations must embed UnimplementedBlobServiceServer +// for forward compatibility +type BlobServiceServer interface { + // Stat exposes metadata about a given blob, + // such as more granular chunking, baos. + // It implicitly allows checking for existence too, as asking this for a + // non-existing Blob will return a Status::not_found grpc error. + // If there's no more granular chunking available, the response will simply + // contain a single chunk. + Stat(context.Context, *StatBlobRequest) (*BlobMeta, error) + // Read returns a stream of BlobChunk, which is just a stream of bytes with + // the digest specified in ReadBlobRequest. + // + // The server may decide on whatever chunking it may seem fit as a size for + // the individual BlobChunk sent in the response stream. + Read(*ReadBlobRequest, BlobService_ReadServer) error + // Put uploads a Blob, by reading a stream of bytes. + // + // The way the data is chunked up in individual BlobChunk messages sent in + // the stream has no effect on how the server ends up chunking blobs up. + Put(BlobService_PutServer) error + mustEmbedUnimplementedBlobServiceServer() +} + +// UnimplementedBlobServiceServer must be embedded to have forward compatible implementations. +type UnimplementedBlobServiceServer struct { +} + +func (UnimplementedBlobServiceServer) Stat(context.Context, *StatBlobRequest) (*BlobMeta, error) { + return nil, status.Errorf(codes.Unimplemented, "method Stat not implemented") +} +func (UnimplementedBlobServiceServer) Read(*ReadBlobRequest, BlobService_ReadServer) error { + return status.Errorf(codes.Unimplemented, "method Read not implemented") +} +func (UnimplementedBlobServiceServer) Put(BlobService_PutServer) error { + return status.Errorf(codes.Unimplemented, "method Put not implemented") +} +func (UnimplementedBlobServiceServer) mustEmbedUnimplementedBlobServiceServer() {} + +// UnsafeBlobServiceServer may be embedded to opt out of forward compatibility for this service. 
+// Use of this interface is not recommended, as added methods to BlobServiceServer will +// result in compilation errors. +type UnsafeBlobServiceServer interface { + mustEmbedUnimplementedBlobServiceServer() +} + +func RegisterBlobServiceServer(s grpc.ServiceRegistrar, srv BlobServiceServer) { + s.RegisterService(&BlobService_ServiceDesc, srv) +} + +func _BlobService_Stat_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(StatBlobRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(BlobServiceServer).Stat(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: BlobService_Stat_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(BlobServiceServer).Stat(ctx, req.(*StatBlobRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _BlobService_Read_Handler(srv interface{}, stream grpc.ServerStream) error { + m := new(ReadBlobRequest) + if err := stream.RecvMsg(m); err != nil { + return err + } + return srv.(BlobServiceServer).Read(m, &blobServiceReadServer{stream}) +} + +type BlobService_ReadServer interface { + Send(*BlobChunk) error + grpc.ServerStream +} + +type blobServiceReadServer struct { + grpc.ServerStream +} + +func (x *blobServiceReadServer) Send(m *BlobChunk) error { + return x.ServerStream.SendMsg(m) +} + +func _BlobService_Put_Handler(srv interface{}, stream grpc.ServerStream) error { + return srv.(BlobServiceServer).Put(&blobServicePutServer{stream}) +} + +type BlobService_PutServer interface { + SendAndClose(*PutBlobResponse) error + Recv() (*BlobChunk, error) + grpc.ServerStream +} + +type blobServicePutServer struct { + grpc.ServerStream +} + +func (x *blobServicePutServer) SendAndClose(m *PutBlobResponse) error { + return x.ServerStream.SendMsg(m) +} + +func (x *blobServicePutServer) Recv() (*BlobChunk, error) { + m := new(BlobChunk) + if err := x.ServerStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +// BlobService_ServiceDesc is the grpc.ServiceDesc for BlobService service. +// It's only intended for direct use with grpc.RegisterService, +// and not to be introspected or modified (even as a copy) +var BlobService_ServiceDesc = grpc.ServiceDesc{ + ServiceName: "tvix.store.v1.BlobService", + HandlerType: (*BlobServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "Stat", + Handler: _BlobService_Stat_Handler, + }, + }, + Streams: []grpc.StreamDesc{ + { + StreamName: "Read", + Handler: _BlobService_Read_Handler, + ServerStreams: true, + }, + { + StreamName: "Put", + Handler: _BlobService_Put_Handler, + ClientStreams: true, + }, + }, + Metadata: "tvix/store/protos/rpc_blobstore.proto", +} diff --git a/tvix/store/protos/rpc_directory.pb.go b/tvix/store/protos/rpc_directory.pb.go new file mode 100644 index 000000000000..5a1cdccca081 --- /dev/null +++ b/tvix/store/protos/rpc_directory.pb.go @@ -0,0 +1,271 @@ +// SPDX-License-Identifier: MIT +// Copyright © 2022 The Tvix Authors + +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// versions: +// protoc-gen-go v1.29.1 +// protoc (unknown) +// source: tvix/store/protos/rpc_directory.proto + +package storev1 + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + reflect "reflect" + sync "sync" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +type GetDirectoryRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // Types that are assignable to ByWhat: + // + // *GetDirectoryRequest_Digest + ByWhat isGetDirectoryRequest_ByWhat `protobuf_oneof:"by_what"` + // If set to true, recursively resolve all child Directory messages. + // Directory messages SHOULD be streamed in a recursive breadth-first walk, + // but other orders are also fine, as long as Directory messages are only + // sent after they are referred to from previously sent Directory messages. + Recursive bool `protobuf:"varint,2,opt,name=recursive,proto3" json:"recursive,omitempty"` +} + +func (x *GetDirectoryRequest) Reset() { + *x = GetDirectoryRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_tvix_store_protos_rpc_directory_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *GetDirectoryRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GetDirectoryRequest) ProtoMessage() {} + +func (x *GetDirectoryRequest) ProtoReflect() protoreflect.Message { + mi := &file_tvix_store_protos_rpc_directory_proto_msgTypes[0] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GetDirectoryRequest.ProtoReflect.Descriptor instead. +func (*GetDirectoryRequest) Descriptor() ([]byte, []int) { + return file_tvix_store_protos_rpc_directory_proto_rawDescGZIP(), []int{0} +} + +func (m *GetDirectoryRequest) GetByWhat() isGetDirectoryRequest_ByWhat { + if m != nil { + return m.ByWhat + } + return nil +} + +func (x *GetDirectoryRequest) GetDigest() []byte { + if x, ok := x.GetByWhat().(*GetDirectoryRequest_Digest); ok { + return x.Digest + } + return nil +} + +func (x *GetDirectoryRequest) GetRecursive() bool { + if x != nil { + return x.Recursive + } + return false +} + +type isGetDirectoryRequest_ByWhat interface { + isGetDirectoryRequest_ByWhat() +} + +type GetDirectoryRequest_Digest struct { + // The blake3 hash of the (root) Directory message, serialized in + // protobuf canonical form. + // Keep in mind this can be a subtree of another root. 
+ Digest []byte `protobuf:"bytes,1,opt,name=digest,proto3,oneof"` +} + +func (*GetDirectoryRequest_Digest) isGetDirectoryRequest_ByWhat() {} + +type PutDirectoryResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + RootDigest []byte `protobuf:"bytes,1,opt,name=root_digest,json=rootDigest,proto3" json:"root_digest,omitempty"` +} + +func (x *PutDirectoryResponse) Reset() { + *x = PutDirectoryResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_tvix_store_protos_rpc_directory_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *PutDirectoryResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*PutDirectoryResponse) ProtoMessage() {} + +func (x *PutDirectoryResponse) ProtoReflect() protoreflect.Message { + mi := &file_tvix_store_protos_rpc_directory_proto_msgTypes[1] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use PutDirectoryResponse.ProtoReflect.Descriptor instead. +func (*PutDirectoryResponse) Descriptor() ([]byte, []int) { + return file_tvix_store_protos_rpc_directory_proto_rawDescGZIP(), []int{1} +} + +func (x *PutDirectoryResponse) GetRootDigest() []byte { + if x != nil { + return x.RootDigest + } + return nil +} + +var File_tvix_store_protos_rpc_directory_proto protoreflect.FileDescriptor + +var file_tvix_store_protos_rpc_directory_proto_rawDesc = []byte{ + 0x0a, 0x25, 0x74, 0x76, 0x69, 0x78, 0x2f, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2f, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x73, 0x2f, 0x72, 0x70, 0x63, 0x5f, 0x64, 0x69, 0x72, 0x65, 0x63, 0x74, 0x6f, 0x72, + 0x79, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x0d, 0x74, 0x76, 0x69, 0x78, 0x2e, 0x73, 0x74, + 0x6f, 0x72, 0x65, 0x2e, 0x76, 0x31, 0x1a, 0x1f, 0x74, 0x76, 0x69, 0x78, 0x2f, 0x73, 0x74, 0x6f, + 0x72, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x73, 0x2f, 0x63, 0x61, 0x73, 0x74, 0x6f, 0x72, + 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x58, 0x0a, 0x13, 0x47, 0x65, 0x74, 0x44, 0x69, + 0x72, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x79, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x18, + 0x0a, 0x06, 0x64, 0x69, 0x67, 0x65, 0x73, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x48, 0x00, + 0x52, 0x06, 0x64, 0x69, 0x67, 0x65, 0x73, 0x74, 0x12, 0x1c, 0x0a, 0x09, 0x72, 0x65, 0x63, 0x75, + 0x72, 0x73, 0x69, 0x76, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x08, 0x52, 0x09, 0x72, 0x65, 0x63, + 0x75, 0x72, 0x73, 0x69, 0x76, 0x65, 0x42, 0x09, 0x0a, 0x07, 0x62, 0x79, 0x5f, 0x77, 0x68, 0x61, + 0x74, 0x22, 0x37, 0x0a, 0x14, 0x50, 0x75, 0x74, 0x44, 0x69, 0x72, 0x65, 0x63, 0x74, 0x6f, 0x72, + 0x79, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x1f, 0x0a, 0x0b, 0x72, 0x6f, 0x6f, + 0x74, 0x5f, 0x64, 0x69, 0x67, 0x65, 0x73, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0a, + 0x72, 0x6f, 0x6f, 0x74, 0x44, 0x69, 0x67, 0x65, 0x73, 0x74, 0x32, 0xa1, 0x01, 0x0a, 0x10, 0x44, + 0x69, 0x72, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x79, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, + 0x45, 0x0a, 0x03, 0x47, 0x65, 0x74, 0x12, 0x22, 0x2e, 0x74, 0x76, 0x69, 0x78, 0x2e, 0x73, 0x74, + 0x6f, 0x72, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x47, 0x65, 0x74, 0x44, 0x69, 0x72, 0x65, 0x63, 0x74, + 0x6f, 0x72, 0x79, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x18, 0x2e, 0x74, 0x76, 0x69, + 0x78, 0x2e, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x76, 0x31, 
0x2e, 0x44, 0x69, 0x72, 0x65, 0x63, + 0x74, 0x6f, 0x72, 0x79, 0x30, 0x01, 0x12, 0x46, 0x0a, 0x03, 0x50, 0x75, 0x74, 0x12, 0x18, 0x2e, + 0x74, 0x76, 0x69, 0x78, 0x2e, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x44, 0x69, + 0x72, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x79, 0x1a, 0x23, 0x2e, 0x74, 0x76, 0x69, 0x78, 0x2e, 0x73, + 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x50, 0x75, 0x74, 0x44, 0x69, 0x72, 0x65, 0x63, + 0x74, 0x6f, 0x72, 0x79, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x28, 0x01, 0x42, 0x28, + 0x5a, 0x26, 0x63, 0x6f, 0x64, 0x65, 0x2e, 0x74, 0x76, 0x6c, 0x2e, 0x66, 0x79, 0x69, 0x2f, 0x74, + 0x76, 0x69, 0x78, 0x2f, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x73, + 0x3b, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x76, 0x31, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, +} + +var ( + file_tvix_store_protos_rpc_directory_proto_rawDescOnce sync.Once + file_tvix_store_protos_rpc_directory_proto_rawDescData = file_tvix_store_protos_rpc_directory_proto_rawDesc +) + +func file_tvix_store_protos_rpc_directory_proto_rawDescGZIP() []byte { + file_tvix_store_protos_rpc_directory_proto_rawDescOnce.Do(func() { + file_tvix_store_protos_rpc_directory_proto_rawDescData = protoimpl.X.CompressGZIP(file_tvix_store_protos_rpc_directory_proto_rawDescData) + }) + return file_tvix_store_protos_rpc_directory_proto_rawDescData +} + +var file_tvix_store_protos_rpc_directory_proto_msgTypes = make([]protoimpl.MessageInfo, 2) +var file_tvix_store_protos_rpc_directory_proto_goTypes = []interface{}{ + (*GetDirectoryRequest)(nil), // 0: tvix.store.v1.GetDirectoryRequest + (*PutDirectoryResponse)(nil), // 1: tvix.store.v1.PutDirectoryResponse + (*Directory)(nil), // 2: tvix.store.v1.Directory +} +var file_tvix_store_protos_rpc_directory_proto_depIdxs = []int32{ + 0, // 0: tvix.store.v1.DirectoryService.Get:input_type -> tvix.store.v1.GetDirectoryRequest + 2, // 1: tvix.store.v1.DirectoryService.Put:input_type -> tvix.store.v1.Directory + 2, // 2: tvix.store.v1.DirectoryService.Get:output_type -> tvix.store.v1.Directory + 1, // 3: tvix.store.v1.DirectoryService.Put:output_type -> tvix.store.v1.PutDirectoryResponse + 2, // [2:4] is the sub-list for method output_type + 0, // [0:2] is the sub-list for method input_type + 0, // [0:0] is the sub-list for extension type_name + 0, // [0:0] is the sub-list for extension extendee + 0, // [0:0] is the sub-list for field type_name +} + +func init() { file_tvix_store_protos_rpc_directory_proto_init() } +func file_tvix_store_protos_rpc_directory_proto_init() { + if File_tvix_store_protos_rpc_directory_proto != nil { + return + } + file_tvix_store_protos_castore_proto_init() + if !protoimpl.UnsafeEnabled { + file_tvix_store_protos_rpc_directory_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*GetDirectoryRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_tvix_store_protos_rpc_directory_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*PutDirectoryResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + } + file_tvix_store_protos_rpc_directory_proto_msgTypes[0].OneofWrappers = []interface{}{ + (*GetDirectoryRequest_Digest)(nil), + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: 
file_tvix_store_protos_rpc_directory_proto_rawDesc,
+ NumEnums: 0,
+ NumMessages: 2,
+ NumExtensions: 0,
+ NumServices: 1,
+ },
+ GoTypes: file_tvix_store_protos_rpc_directory_proto_goTypes,
+ DependencyIndexes: file_tvix_store_protos_rpc_directory_proto_depIdxs,
+ MessageInfos: file_tvix_store_protos_rpc_directory_proto_msgTypes,
+ }.Build()
+ File_tvix_store_protos_rpc_directory_proto = out.File
+ file_tvix_store_protos_rpc_directory_proto_rawDesc = nil
+ file_tvix_store_protos_rpc_directory_proto_goTypes = nil
+ file_tvix_store_protos_rpc_directory_proto_depIdxs = nil
+}
diff --git a/tvix/store/protos/rpc_directory.proto b/tvix/store/protos/rpc_directory.proto
new file mode 100644
index 000000000000..0aeed5c3c0e1
--- /dev/null
+++ b/tvix/store/protos/rpc_directory.proto
@@ -0,0 +1,48 @@
+// SPDX-License-Identifier: MIT
+// Copyright © 2022 The Tvix Authors
+syntax = "proto3";
+
+package tvix.store.v1;
+
+import "tvix/store/protos/castore.proto";
+
+option go_package = "code.tvl.fyi/tvix/store/protos;storev1";
+
+service DirectoryService {
+ // Get retrieves a stream of Directory messages, by using the lookup
+ // parameters in GetDirectoryRequest.
+ // Keep in mind multiple DirectoryNodes in different parts of the graph might
+ // have the same digest if they have the same underlying contents,
+ // so sending subsequent ones can be omitted.
+ rpc Get(GetDirectoryRequest) returns (stream Directory);
+
+ // Put uploads a graph of Directory messages.
+ // Individual Directory messages need to be sent in an order walking up
+ // from the leaves to the root - a Directory message can only refer to
+ // Directory messages previously sent in the same stream.
+ // Keep in mind multiple DirectoryNodes in different parts of the graph might
+ // have the same digest if they have the same underlying contents,
+ // so sending subsequent ones can be omitted.
+ // We might add a separate method, allowing to send partial graphs at a later
+ // time, if requiring to send the full graph turns out to be a problem.
+ rpc Put(stream Directory) returns (PutDirectoryResponse);
+}
+
+message GetDirectoryRequest {
+ oneof by_what {
+ // The blake3 hash of the (root) Directory message, serialized in
+ // protobuf canonical form.
+ // Keep in mind this can be a subtree of another root.
+ bytes digest = 1;
+ }
+
+ // If set to true, recursively resolve all child Directory messages.
+ // Directory messages SHOULD be streamed in a recursive breadth-first walk,
+ // but other orders are also fine, as long as Directory messages are only
+ // sent after they are referred to from previously sent Directory messages.
+ bool recursive = 2;
+}
+
+message PutDirectoryResponse {
+ bytes root_digest = 1;
+}
diff --git a/tvix/store/protos/rpc_directory_grpc.pb.go b/tvix/store/protos/rpc_directory_grpc.pb.go
new file mode 100644
index 000000000000..a578dbd89d39
--- /dev/null
+++ b/tvix/store/protos/rpc_directory_grpc.pb.go
@@ -0,0 +1,238 @@
+// SPDX-License-Identifier: MIT
+// Copyright © 2022 The Tvix Authors
+
+// Code generated by protoc-gen-go-grpc. DO NOT EDIT.
+// versions:
+// - protoc-gen-go-grpc v1.3.0
+// - protoc (unknown)
+// source: tvix/store/protos/rpc_directory.proto
+
+package storev1
+
+import (
+ context "context"
+ grpc "google.golang.org/grpc"
+ codes "google.golang.org/grpc/codes"
+ status "google.golang.org/grpc/status"
+)
+
+// This is a compile-time assertion to ensure that this generated file
+// is compatible with the grpc package it is being compiled against.
+// Requires gRPC-Go v1.32.0 or later.
+const _ = grpc.SupportPackageIsVersion7 + +const ( + DirectoryService_Get_FullMethodName = "/tvix.store.v1.DirectoryService/Get" + DirectoryService_Put_FullMethodName = "/tvix.store.v1.DirectoryService/Put" +) + +// DirectoryServiceClient is the client API for DirectoryService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream. +type DirectoryServiceClient interface { + // Get retrieves a stream of Directory messages, by using the lookup + // parameters in GetDirectoryRequest. + // Keep in mind multiple DirectoryNodes in different parts of the graph might + // have the same digest if they have the same underlying contents, + // so sending subsequent ones can be omitted. + Get(ctx context.Context, in *GetDirectoryRequest, opts ...grpc.CallOption) (DirectoryService_GetClient, error) + // Put uploads a graph of Directory messages. + // Individual Directory messages need to be send in an order walking up + // from the leaves to the root - a Directory message can only refer to + // Directory messages previously sent in the same stream. + // Keep in mind multiple DirectoryNodes in different parts of the graph might + // have the same digest if they have the same underlying contents, + // so sending subsequent ones can be omitted. + // We might add a separate method, allowing to send partial graphs at a later + // time, if requiring to send the full graph turns out to be a problem. + Put(ctx context.Context, opts ...grpc.CallOption) (DirectoryService_PutClient, error) +} + +type directoryServiceClient struct { + cc grpc.ClientConnInterface +} + +func NewDirectoryServiceClient(cc grpc.ClientConnInterface) DirectoryServiceClient { + return &directoryServiceClient{cc} +} + +func (c *directoryServiceClient) Get(ctx context.Context, in *GetDirectoryRequest, opts ...grpc.CallOption) (DirectoryService_GetClient, error) { + stream, err := c.cc.NewStream(ctx, &DirectoryService_ServiceDesc.Streams[0], DirectoryService_Get_FullMethodName, opts...) + if err != nil { + return nil, err + } + x := &directoryServiceGetClient{stream} + if err := x.ClientStream.SendMsg(in); err != nil { + return nil, err + } + if err := x.ClientStream.CloseSend(); err != nil { + return nil, err + } + return x, nil +} + +type DirectoryService_GetClient interface { + Recv() (*Directory, error) + grpc.ClientStream +} + +type directoryServiceGetClient struct { + grpc.ClientStream +} + +func (x *directoryServiceGetClient) Recv() (*Directory, error) { + m := new(Directory) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +func (c *directoryServiceClient) Put(ctx context.Context, opts ...grpc.CallOption) (DirectoryService_PutClient, error) { + stream, err := c.cc.NewStream(ctx, &DirectoryService_ServiceDesc.Streams[1], DirectoryService_Put_FullMethodName, opts...) 
+ if err != nil { + return nil, err + } + x := &directoryServicePutClient{stream} + return x, nil +} + +type DirectoryService_PutClient interface { + Send(*Directory) error + CloseAndRecv() (*PutDirectoryResponse, error) + grpc.ClientStream +} + +type directoryServicePutClient struct { + grpc.ClientStream +} + +func (x *directoryServicePutClient) Send(m *Directory) error { + return x.ClientStream.SendMsg(m) +} + +func (x *directoryServicePutClient) CloseAndRecv() (*PutDirectoryResponse, error) { + if err := x.ClientStream.CloseSend(); err != nil { + return nil, err + } + m := new(PutDirectoryResponse) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +// DirectoryServiceServer is the server API for DirectoryService service. +// All implementations must embed UnimplementedDirectoryServiceServer +// for forward compatibility +type DirectoryServiceServer interface { + // Get retrieves a stream of Directory messages, by using the lookup + // parameters in GetDirectoryRequest. + // Keep in mind multiple DirectoryNodes in different parts of the graph might + // have the same digest if they have the same underlying contents, + // so sending subsequent ones can be omitted. + Get(*GetDirectoryRequest, DirectoryService_GetServer) error + // Put uploads a graph of Directory messages. + // Individual Directory messages need to be send in an order walking up + // from the leaves to the root - a Directory message can only refer to + // Directory messages previously sent in the same stream. + // Keep in mind multiple DirectoryNodes in different parts of the graph might + // have the same digest if they have the same underlying contents, + // so sending subsequent ones can be omitted. + // We might add a separate method, allowing to send partial graphs at a later + // time, if requiring to send the full graph turns out to be a problem. + Put(DirectoryService_PutServer) error + mustEmbedUnimplementedDirectoryServiceServer() +} + +// UnimplementedDirectoryServiceServer must be embedded to have forward compatible implementations. +type UnimplementedDirectoryServiceServer struct { +} + +func (UnimplementedDirectoryServiceServer) Get(*GetDirectoryRequest, DirectoryService_GetServer) error { + return status.Errorf(codes.Unimplemented, "method Get not implemented") +} +func (UnimplementedDirectoryServiceServer) Put(DirectoryService_PutServer) error { + return status.Errorf(codes.Unimplemented, "method Put not implemented") +} +func (UnimplementedDirectoryServiceServer) mustEmbedUnimplementedDirectoryServiceServer() {} + +// UnsafeDirectoryServiceServer may be embedded to opt out of forward compatibility for this service. +// Use of this interface is not recommended, as added methods to DirectoryServiceServer will +// result in compilation errors. 
+type UnsafeDirectoryServiceServer interface { + mustEmbedUnimplementedDirectoryServiceServer() +} + +func RegisterDirectoryServiceServer(s grpc.ServiceRegistrar, srv DirectoryServiceServer) { + s.RegisterService(&DirectoryService_ServiceDesc, srv) +} + +func _DirectoryService_Get_Handler(srv interface{}, stream grpc.ServerStream) error { + m := new(GetDirectoryRequest) + if err := stream.RecvMsg(m); err != nil { + return err + } + return srv.(DirectoryServiceServer).Get(m, &directoryServiceGetServer{stream}) +} + +type DirectoryService_GetServer interface { + Send(*Directory) error + grpc.ServerStream +} + +type directoryServiceGetServer struct { + grpc.ServerStream +} + +func (x *directoryServiceGetServer) Send(m *Directory) error { + return x.ServerStream.SendMsg(m) +} + +func _DirectoryService_Put_Handler(srv interface{}, stream grpc.ServerStream) error { + return srv.(DirectoryServiceServer).Put(&directoryServicePutServer{stream}) +} + +type DirectoryService_PutServer interface { + SendAndClose(*PutDirectoryResponse) error + Recv() (*Directory, error) + grpc.ServerStream +} + +type directoryServicePutServer struct { + grpc.ServerStream +} + +func (x *directoryServicePutServer) SendAndClose(m *PutDirectoryResponse) error { + return x.ServerStream.SendMsg(m) +} + +func (x *directoryServicePutServer) Recv() (*Directory, error) { + m := new(Directory) + if err := x.ServerStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +// DirectoryService_ServiceDesc is the grpc.ServiceDesc for DirectoryService service. +// It's only intended for direct use with grpc.RegisterService, +// and not to be introspected or modified (even as a copy) +var DirectoryService_ServiceDesc = grpc.ServiceDesc{ + ServiceName: "tvix.store.v1.DirectoryService", + HandlerType: (*DirectoryServiceServer)(nil), + Methods: []grpc.MethodDesc{}, + Streams: []grpc.StreamDesc{ + { + StreamName: "Get", + Handler: _DirectoryService_Get_Handler, + ServerStreams: true, + }, + { + StreamName: "Put", + Handler: _DirectoryService_Put_Handler, + ClientStreams: true, + }, + }, + Metadata: "tvix/store/protos/rpc_directory.proto", +} diff --git a/tvix/store/protos/rpc_pathinfo.pb.go b/tvix/store/protos/rpc_pathinfo.pb.go new file mode 100644 index 000000000000..bbbf3122ba23 --- /dev/null +++ b/tvix/store/protos/rpc_pathinfo.pb.go @@ -0,0 +1,284 @@ +// SPDX-License-Identifier: MIT +// Copyright © 2022 The Tvix Authors + +// Code generated by protoc-gen-go. DO NOT EDIT. +// versions: +// protoc-gen-go v1.29.1 +// protoc (unknown) +// source: tvix/store/protos/rpc_pathinfo.proto + +package storev1 + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + reflect "reflect" + sync "sync" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +// GetPathInfoRequest describes the lookup parameters that can be used to +// lookup a PathInfo objects. +// Currently, only a lookup by output hash is supported. 
+type GetPathInfoRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // Types that are assignable to ByWhat: + // + // *GetPathInfoRequest_ByOutputHash + ByWhat isGetPathInfoRequest_ByWhat `protobuf_oneof:"by_what"` +} + +func (x *GetPathInfoRequest) Reset() { + *x = GetPathInfoRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_tvix_store_protos_rpc_pathinfo_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *GetPathInfoRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GetPathInfoRequest) ProtoMessage() {} + +func (x *GetPathInfoRequest) ProtoReflect() protoreflect.Message { + mi := &file_tvix_store_protos_rpc_pathinfo_proto_msgTypes[0] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GetPathInfoRequest.ProtoReflect.Descriptor instead. +func (*GetPathInfoRequest) Descriptor() ([]byte, []int) { + return file_tvix_store_protos_rpc_pathinfo_proto_rawDescGZIP(), []int{0} +} + +func (m *GetPathInfoRequest) GetByWhat() isGetPathInfoRequest_ByWhat { + if m != nil { + return m.ByWhat + } + return nil +} + +func (x *GetPathInfoRequest) GetByOutputHash() []byte { + if x, ok := x.GetByWhat().(*GetPathInfoRequest_ByOutputHash); ok { + return x.ByOutputHash + } + return nil +} + +type isGetPathInfoRequest_ByWhat interface { + isGetPathInfoRequest_ByWhat() +} + +type GetPathInfoRequest_ByOutputHash struct { + // The output hash of a nix path (20 bytes). + // This is the nixbase32-decoded portion of a Nix output path, so to substitute + // /nix/store/xm35nga2g20mz5sm5l6n8v3bdm86yj83-cowsay-3.04 + // this field would contain nixbase32dec("xm35nga2g20mz5sm5l6n8v3bdm86yj83"). + ByOutputHash []byte `protobuf:"bytes,1,opt,name=by_output_hash,json=byOutputHash,proto3,oneof"` +} + +func (*GetPathInfoRequest_ByOutputHash) isGetPathInfoRequest_ByWhat() {} + +// CalculateNARResponse is the response returned by the CalculateNAR request. +// +// It contains the size of the NAR representation (in bytes), and the sha56 +// digest. +type CalculateNARResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // This size of the NAR file, in bytes. + NarSize uint64 `protobuf:"varint,1,opt,name=nar_size,json=narSize,proto3" json:"nar_size,omitempty"` + // The sha256 of the NAR file representation. 
+ NarSha256 []byte `protobuf:"bytes,2,opt,name=nar_sha256,json=narSha256,proto3" json:"nar_sha256,omitempty"` +} + +func (x *CalculateNARResponse) Reset() { + *x = CalculateNARResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_tvix_store_protos_rpc_pathinfo_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *CalculateNARResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*CalculateNARResponse) ProtoMessage() {} + +func (x *CalculateNARResponse) ProtoReflect() protoreflect.Message { + mi := &file_tvix_store_protos_rpc_pathinfo_proto_msgTypes[1] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use CalculateNARResponse.ProtoReflect.Descriptor instead. +func (*CalculateNARResponse) Descriptor() ([]byte, []int) { + return file_tvix_store_protos_rpc_pathinfo_proto_rawDescGZIP(), []int{1} +} + +func (x *CalculateNARResponse) GetNarSize() uint64 { + if x != nil { + return x.NarSize + } + return 0 +} + +func (x *CalculateNARResponse) GetNarSha256() []byte { + if x != nil { + return x.NarSha256 + } + return nil +} + +var File_tvix_store_protos_rpc_pathinfo_proto protoreflect.FileDescriptor + +var file_tvix_store_protos_rpc_pathinfo_proto_rawDesc = []byte{ + 0x0a, 0x24, 0x74, 0x76, 0x69, 0x78, 0x2f, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2f, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x73, 0x2f, 0x72, 0x70, 0x63, 0x5f, 0x70, 0x61, 0x74, 0x68, 0x69, 0x6e, 0x66, 0x6f, + 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x0d, 0x74, 0x76, 0x69, 0x78, 0x2e, 0x73, 0x74, 0x6f, + 0x72, 0x65, 0x2e, 0x76, 0x31, 0x1a, 0x20, 0x74, 0x76, 0x69, 0x78, 0x2f, 0x73, 0x74, 0x6f, 0x72, + 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x73, 0x2f, 0x70, 0x61, 0x74, 0x68, 0x69, 0x6e, 0x66, + 0x6f, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x47, 0x0a, 0x12, 0x47, 0x65, 0x74, 0x50, 0x61, + 0x74, 0x68, 0x49, 0x6e, 0x66, 0x6f, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x26, 0x0a, + 0x0e, 0x62, 0x79, 0x5f, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x5f, 0x68, 0x61, 0x73, 0x68, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x0c, 0x48, 0x00, 0x52, 0x0c, 0x62, 0x79, 0x4f, 0x75, 0x74, 0x70, 0x75, + 0x74, 0x48, 0x61, 0x73, 0x68, 0x42, 0x09, 0x0a, 0x07, 0x62, 0x79, 0x5f, 0x77, 0x68, 0x61, 0x74, + 0x22, 0x50, 0x0a, 0x14, 0x43, 0x61, 0x6c, 0x63, 0x75, 0x6c, 0x61, 0x74, 0x65, 0x4e, 0x41, 0x52, + 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x19, 0x0a, 0x08, 0x6e, 0x61, 0x72, 0x5f, + 0x73, 0x69, 0x7a, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x52, 0x07, 0x6e, 0x61, 0x72, 0x53, + 0x69, 0x7a, 0x65, 0x12, 0x1d, 0x0a, 0x0a, 0x6e, 0x61, 0x72, 0x5f, 0x73, 0x68, 0x61, 0x32, 0x35, + 0x36, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x09, 0x6e, 0x61, 0x72, 0x53, 0x68, 0x61, 0x32, + 0x35, 0x36, 0x32, 0xd7, 0x01, 0x0a, 0x0f, 0x50, 0x61, 0x74, 0x68, 0x49, 0x6e, 0x66, 0x6f, 0x53, + 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x41, 0x0a, 0x03, 0x47, 0x65, 0x74, 0x12, 0x21, 0x2e, + 0x74, 0x76, 0x69, 0x78, 0x2e, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x47, 0x65, + 0x74, 0x50, 0x61, 0x74, 0x68, 0x49, 0x6e, 0x66, 0x6f, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, + 0x1a, 0x17, 0x2e, 0x74, 0x76, 0x69, 0x78, 0x2e, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x76, 0x31, + 0x2e, 0x50, 0x61, 0x74, 0x68, 0x49, 0x6e, 0x66, 0x6f, 0x12, 0x37, 0x0a, 0x03, 0x50, 0x75, 0x74, + 0x12, 0x17, 0x2e, 0x74, 0x76, 0x69, 0x78, 0x2e, 
0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x76, 0x31, + 0x2e, 0x50, 0x61, 0x74, 0x68, 0x49, 0x6e, 0x66, 0x6f, 0x1a, 0x17, 0x2e, 0x74, 0x76, 0x69, 0x78, + 0x2e, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x50, 0x61, 0x74, 0x68, 0x49, 0x6e, + 0x66, 0x6f, 0x12, 0x48, 0x0a, 0x0c, 0x43, 0x61, 0x6c, 0x63, 0x75, 0x6c, 0x61, 0x74, 0x65, 0x4e, + 0x41, 0x52, 0x12, 0x13, 0x2e, 0x74, 0x76, 0x69, 0x78, 0x2e, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, + 0x76, 0x31, 0x2e, 0x4e, 0x6f, 0x64, 0x65, 0x1a, 0x23, 0x2e, 0x74, 0x76, 0x69, 0x78, 0x2e, 0x73, + 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x43, 0x61, 0x6c, 0x63, 0x75, 0x6c, 0x61, 0x74, + 0x65, 0x4e, 0x41, 0x52, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x42, 0x28, 0x5a, 0x26, + 0x63, 0x6f, 0x64, 0x65, 0x2e, 0x74, 0x76, 0x6c, 0x2e, 0x66, 0x79, 0x69, 0x2f, 0x74, 0x76, 0x69, + 0x78, 0x2f, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x73, 0x3b, 0x73, + 0x74, 0x6f, 0x72, 0x65, 0x76, 0x31, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, +} + +var ( + file_tvix_store_protos_rpc_pathinfo_proto_rawDescOnce sync.Once + file_tvix_store_protos_rpc_pathinfo_proto_rawDescData = file_tvix_store_protos_rpc_pathinfo_proto_rawDesc +) + +func file_tvix_store_protos_rpc_pathinfo_proto_rawDescGZIP() []byte { + file_tvix_store_protos_rpc_pathinfo_proto_rawDescOnce.Do(func() { + file_tvix_store_protos_rpc_pathinfo_proto_rawDescData = protoimpl.X.CompressGZIP(file_tvix_store_protos_rpc_pathinfo_proto_rawDescData) + }) + return file_tvix_store_protos_rpc_pathinfo_proto_rawDescData +} + +var file_tvix_store_protos_rpc_pathinfo_proto_msgTypes = make([]protoimpl.MessageInfo, 2) +var file_tvix_store_protos_rpc_pathinfo_proto_goTypes = []interface{}{ + (*GetPathInfoRequest)(nil), // 0: tvix.store.v1.GetPathInfoRequest + (*CalculateNARResponse)(nil), // 1: tvix.store.v1.CalculateNARResponse + (*PathInfo)(nil), // 2: tvix.store.v1.PathInfo + (*Node)(nil), // 3: tvix.store.v1.Node +} +var file_tvix_store_protos_rpc_pathinfo_proto_depIdxs = []int32{ + 0, // 0: tvix.store.v1.PathInfoService.Get:input_type -> tvix.store.v1.GetPathInfoRequest + 2, // 1: tvix.store.v1.PathInfoService.Put:input_type -> tvix.store.v1.PathInfo + 3, // 2: tvix.store.v1.PathInfoService.CalculateNAR:input_type -> tvix.store.v1.Node + 2, // 3: tvix.store.v1.PathInfoService.Get:output_type -> tvix.store.v1.PathInfo + 2, // 4: tvix.store.v1.PathInfoService.Put:output_type -> tvix.store.v1.PathInfo + 1, // 5: tvix.store.v1.PathInfoService.CalculateNAR:output_type -> tvix.store.v1.CalculateNARResponse + 3, // [3:6] is the sub-list for method output_type + 0, // [0:3] is the sub-list for method input_type + 0, // [0:0] is the sub-list for extension type_name + 0, // [0:0] is the sub-list for extension extendee + 0, // [0:0] is the sub-list for field type_name +} + +func init() { file_tvix_store_protos_rpc_pathinfo_proto_init() } +func file_tvix_store_protos_rpc_pathinfo_proto_init() { + if File_tvix_store_protos_rpc_pathinfo_proto != nil { + return + } + file_tvix_store_protos_pathinfo_proto_init() + if !protoimpl.UnsafeEnabled { + file_tvix_store_protos_rpc_pathinfo_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*GetPathInfoRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_tvix_store_protos_rpc_pathinfo_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*CalculateNARResponse); i { + case 0: + return &v.state + 
case 1:
+				return &v.sizeCache
+			case 2:
+				return &v.unknownFields
+			default:
+				return nil
+			}
+		}
+	}
+	file_tvix_store_protos_rpc_pathinfo_proto_msgTypes[0].OneofWrappers = []interface{}{
+		(*GetPathInfoRequest_ByOutputHash)(nil),
+	}
+	type x struct{}
+	out := protoimpl.TypeBuilder{
+		File: protoimpl.DescBuilder{
+			GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
+			RawDescriptor: file_tvix_store_protos_rpc_pathinfo_proto_rawDesc,
+			NumEnums:      0,
+			NumMessages:   2,
+			NumExtensions: 0,
+			NumServices:   1,
+		},
+		GoTypes:           file_tvix_store_protos_rpc_pathinfo_proto_goTypes,
+		DependencyIndexes: file_tvix_store_protos_rpc_pathinfo_proto_depIdxs,
+		MessageInfos:      file_tvix_store_protos_rpc_pathinfo_proto_msgTypes,
+	}.Build()
+	File_tvix_store_protos_rpc_pathinfo_proto = out.File
+	file_tvix_store_protos_rpc_pathinfo_proto_rawDesc = nil
+	file_tvix_store_protos_rpc_pathinfo_proto_goTypes = nil
+	file_tvix_store_protos_rpc_pathinfo_proto_depIdxs = nil
+}
diff --git a/tvix/store/protos/rpc_pathinfo.proto b/tvix/store/protos/rpc_pathinfo.proto
new file mode 100644
index 000000000000..991abf6efa99
--- /dev/null
+++ b/tvix/store/protos/rpc_pathinfo.proto
@@ -0,0 +1,73 @@
+// SPDX-License-Identifier: MIT
+// Copyright © 2022 The Tvix Authors
+syntax = "proto3";
+
+package tvix.store.v1;
+
+import "tvix/store/protos/pathinfo.proto";
+
+option go_package = "code.tvl.fyi/tvix/store/protos;storev1";
+
+service PathInfoService {
+  // Return a PathInfo message, identified by the decoded nixbase32 part
+  // of a Nix output path.
+  //
+  // To substitute /nix/store/xm35nga2g20mz5sm5l6n8v3bdm86yj83-
+  // cowsay-3.04 the bytes in the request would be
+  // nixbase32dec("xm35nga2g20mz5sm5l6n8v3bdm86yj83").
+  rpc Get(GetPathInfoRequest) returns (PathInfo);
+
+  // Put uploads a PathInfo object to the remote end. It MUST NOT return
+  // until the PathInfo object has been written on the remote end.
+  //
+  // The remote end MAY check if a potential DirectoryNode has already been
+  // uploaded.
+  //
+  // Uploading clients SHOULD obviously not steer other machines to try to
+  // substitute from the remote end before having finished uploading
+  // PathInfo, Directories and Blobs.
+  // The returned PathInfo object MAY contain additional narinfo signatures,
+  // but is otherwise left untouched.
+  rpc Put(PathInfo) returns (PathInfo);
+
+
+  // Calculate the NAR representation of the contents specified by the
+  // root_node. The calculation SHOULD be cached server-side for subsequent
+  // requests.
+  //
+  // All references (to blobs or Directory messages) MUST already exist in
+  // the store.
+  //
+  // The method can be used to produce a Nix fixed-output path, which
+  // contains the (compressed) sha256 of the NAR content representation in
+  // the root_node name (suffixed with the name).
+  //
+  // It can also be used to calculate arbitrary NAR hashes of output paths,
+  // in case a legacy Nix Binary Cache frontend is provided.
+  rpc CalculateNAR(Node) returns (CalculateNARResponse);
+}
+
+// GetPathInfoRequest describes the lookup parameters that can be used to
+// look up a PathInfo object.
+// Currently, only a lookup by output hash is supported.
+message GetPathInfoRequest {
+  oneof by_what {
+    // The output hash of a nix path (20 bytes).
+    // This is the nixbase32-decoded portion of a Nix output path, so to substitute
+    // /nix/store/xm35nga2g20mz5sm5l6n8v3bdm86yj83-cowsay-3.04
+    // this field would contain nixbase32dec("xm35nga2g20mz5sm5l6n8v3bdm86yj83").
+    bytes by_output_hash = 1;
+  };
+}
+
+// CalculateNARResponse is the response returned by the CalculateNAR request.
+//
+// It contains the size of the NAR representation (in bytes), and the sha256
+// digest.
+message CalculateNARResponse {
+  // The size of the NAR file, in bytes.
+  uint64 nar_size = 1;
+
+  // The sha256 of the NAR file representation.
+  bytes nar_sha256 = 2;
+}
diff --git a/tvix/store/protos/rpc_pathinfo_grpc.pb.go b/tvix/store/protos/rpc_pathinfo_grpc.pb.go
new file mode 100644
index 000000000000..5a858a3324c6
--- /dev/null
+++ b/tvix/store/protos/rpc_pathinfo_grpc.pb.go
@@ -0,0 +1,246 @@
+// SPDX-License-Identifier: MIT
+// Copyright © 2022 The Tvix Authors
+
+// Code generated by protoc-gen-go-grpc. DO NOT EDIT.
+// versions:
+// - protoc-gen-go-grpc v1.3.0
+// - protoc (unknown)
+// source: tvix/store/protos/rpc_pathinfo.proto
+
+package storev1
+
+import (
+	context "context"
+	grpc "google.golang.org/grpc"
+	codes "google.golang.org/grpc/codes"
+	status "google.golang.org/grpc/status"
+)
+
+// This is a compile-time assertion to ensure that this generated file
+// is compatible with the grpc package it is being compiled against.
+// Requires gRPC-Go v1.32.0 or later.
+const _ = grpc.SupportPackageIsVersion7
+
+const (
+	PathInfoService_Get_FullMethodName          = "/tvix.store.v1.PathInfoService/Get"
+	PathInfoService_Put_FullMethodName          = "/tvix.store.v1.PathInfoService/Put"
+	PathInfoService_CalculateNAR_FullMethodName = "/tvix.store.v1.PathInfoService/CalculateNAR"
+)
+
+// PathInfoServiceClient is the client API for PathInfoService service.
+//
+// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream.
+type PathInfoServiceClient interface {
+	// Return a PathInfo message, identified by the decoded nixbase32 part
+	// of a Nix output path.
+	//
+	// To substitute /nix/store/xm35nga2g20mz5sm5l6n8v3bdm86yj83-
+	// cowsay-3.04 the bytes in the request would be
+	// nixbase32dec("xm35nga2g20mz5sm5l6n8v3bdm86yj83").
+	Get(ctx context.Context, in *GetPathInfoRequest, opts ...grpc.CallOption) (*PathInfo, error)
+	// Put uploads a PathInfo object to the remote end. It MUST NOT return
+	// until the PathInfo object has been written on the remote end.
+	//
+	// The remote end MAY check if a potential DirectoryNode has already been
+	// uploaded.
+	//
+	// Uploading clients SHOULD obviously not steer other machines to try to
+	// substitute from the remote end before having finished uploading
+	// PathInfo, Directories and Blobs.
+	// The returned PathInfo object MAY contain additional narinfo signatures,
+	// but is otherwise left untouched.
+	Put(ctx context.Context, in *PathInfo, opts ...grpc.CallOption) (*PathInfo, error)
+	// Calculate the NAR representation of the contents specified by the
+	// root_node. The calculation SHOULD be cached server-side for subsequent
+	// requests.
+	//
+	// All references (to blobs or Directory messages) MUST already exist in
+	// the store.
+	//
+	// The method can be used to produce a Nix fixed-output path, which
+	// contains the (compressed) sha256 of the NAR content representation in
+	// the root_node name (suffixed with the name).
+	//
+	// It can also be used to calculate arbitrary NAR hashes of output paths,
+	// in case a legacy Nix Binary Cache frontend is provided.
+ CalculateNAR(ctx context.Context, in *Node, opts ...grpc.CallOption) (*CalculateNARResponse, error) +} + +type pathInfoServiceClient struct { + cc grpc.ClientConnInterface +} + +func NewPathInfoServiceClient(cc grpc.ClientConnInterface) PathInfoServiceClient { + return &pathInfoServiceClient{cc} +} + +func (c *pathInfoServiceClient) Get(ctx context.Context, in *GetPathInfoRequest, opts ...grpc.CallOption) (*PathInfo, error) { + out := new(PathInfo) + err := c.cc.Invoke(ctx, PathInfoService_Get_FullMethodName, in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *pathInfoServiceClient) Put(ctx context.Context, in *PathInfo, opts ...grpc.CallOption) (*PathInfo, error) { + out := new(PathInfo) + err := c.cc.Invoke(ctx, PathInfoService_Put_FullMethodName, in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *pathInfoServiceClient) CalculateNAR(ctx context.Context, in *Node, opts ...grpc.CallOption) (*CalculateNARResponse, error) { + out := new(CalculateNARResponse) + err := c.cc.Invoke(ctx, PathInfoService_CalculateNAR_FullMethodName, in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// PathInfoServiceServer is the server API for PathInfoService service. +// All implementations must embed UnimplementedPathInfoServiceServer +// for forward compatibility +type PathInfoServiceServer interface { + // Return a PathInfo message, identified by the decoded nixbase32 part + // of a Nix output path. + // + // To substitute /nix/store/xm35nga2g20mz5sm5l6n8v3bdm86yj83- + // cowsay-3.04 the bytes in the request would be + // nixbase32dec("xm35nga2g20mz5sm5l6n8v3bdm86yj83"). + Get(context.Context, *GetPathInfoRequest) (*PathInfo, error) + // Put uploads a PathInfo object to the remote end. It MUST not return + // until the PathInfo object has been written on the the remote end. + // + // The remote end MAY check if a potential DirectoryNode has already been + // uploaded. + // + // Uploading clients SHOULD obviously not steer other machines to try to + // substitute before from the remote end before having finished uploading + // PathInfo, Directories and Blobs. + // The returned PathInfo object MAY contain additional narinfo signatures, + // but is otherwise left untouched. + Put(context.Context, *PathInfo) (*PathInfo, error) + // Calculate the NAR representation of the contents specified by the + // root_node. The calculation SHOULD be cached server-side for subsequent + // requests. + // + // All references (to blobs or Directory messages) MUST already exist in + // the store. + // + // The method can be used to produce a Nix fixed-output path, which + // contains the (compressed) sha256 of the NAR content representation in + // the root_node name (suffixed with the name). + // + // It can also be used to calculate arbitrary NAR hashes of output paths, + // in case a legacy Nix Binary Cache frontend is provided. + CalculateNAR(context.Context, *Node) (*CalculateNARResponse, error) + mustEmbedUnimplementedPathInfoServiceServer() +} + +// UnimplementedPathInfoServiceServer must be embedded to have forward compatible implementations. 
+type UnimplementedPathInfoServiceServer struct { +} + +func (UnimplementedPathInfoServiceServer) Get(context.Context, *GetPathInfoRequest) (*PathInfo, error) { + return nil, status.Errorf(codes.Unimplemented, "method Get not implemented") +} +func (UnimplementedPathInfoServiceServer) Put(context.Context, *PathInfo) (*PathInfo, error) { + return nil, status.Errorf(codes.Unimplemented, "method Put not implemented") +} +func (UnimplementedPathInfoServiceServer) CalculateNAR(context.Context, *Node) (*CalculateNARResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method CalculateNAR not implemented") +} +func (UnimplementedPathInfoServiceServer) mustEmbedUnimplementedPathInfoServiceServer() {} + +// UnsafePathInfoServiceServer may be embedded to opt out of forward compatibility for this service. +// Use of this interface is not recommended, as added methods to PathInfoServiceServer will +// result in compilation errors. +type UnsafePathInfoServiceServer interface { + mustEmbedUnimplementedPathInfoServiceServer() +} + +func RegisterPathInfoServiceServer(s grpc.ServiceRegistrar, srv PathInfoServiceServer) { + s.RegisterService(&PathInfoService_ServiceDesc, srv) +} + +func _PathInfoService_Get_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetPathInfoRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(PathInfoServiceServer).Get(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: PathInfoService_Get_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(PathInfoServiceServer).Get(ctx, req.(*GetPathInfoRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _PathInfoService_Put_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(PathInfo) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(PathInfoServiceServer).Put(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: PathInfoService_Put_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(PathInfoServiceServer).Put(ctx, req.(*PathInfo)) + } + return interceptor(ctx, in, info, handler) +} + +func _PathInfoService_CalculateNAR_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(Node) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(PathInfoServiceServer).CalculateNAR(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: PathInfoService_CalculateNAR_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(PathInfoServiceServer).CalculateNAR(ctx, req.(*Node)) + } + return interceptor(ctx, in, info, handler) +} + +// PathInfoService_ServiceDesc is the grpc.ServiceDesc for PathInfoService service. 
+// It's only intended for direct use with grpc.RegisterService, +// and not to be introspected or modified (even as a copy) +var PathInfoService_ServiceDesc = grpc.ServiceDesc{ + ServiceName: "tvix.store.v1.PathInfoService", + HandlerType: (*PathInfoServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "Get", + Handler: _PathInfoService_Get_Handler, + }, + { + MethodName: "Put", + Handler: _PathInfoService_Put_Handler, + }, + { + MethodName: "CalculateNAR", + Handler: _PathInfoService_CalculateNAR_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "tvix/store/protos/rpc_pathinfo.proto", +} diff --git a/tvix/store/src/bin/tvix-store.rs b/tvix/store/src/bin/tvix-store.rs new file mode 100644 index 000000000000..8c278c433997 --- /dev/null +++ b/tvix/store/src/bin/tvix-store.rs @@ -0,0 +1,206 @@ +use clap::Subcommand; +use data_encoding::BASE64; +use futures::future::try_join_all; +use std::io; +use std::path::Path; +use std::path::PathBuf; +use std::sync::Arc; +use tracing_subscriber::prelude::*; +use tvix_store::blobservice::GRPCBlobService; +use tvix_store::blobservice::SledBlobService; +use tvix_store::directoryservice::GRPCDirectoryService; +use tvix_store::directoryservice::SledDirectoryService; +use tvix_store::nar::GRPCNARCalculationService; +use tvix_store::nar::NonCachingNARCalculationService; +use tvix_store::pathinfoservice::GRPCPathInfoService; +use tvix_store::pathinfoservice::SledPathInfoService; +use tvix_store::proto::blob_service_client::BlobServiceClient; +use tvix_store::proto::blob_service_server::BlobServiceServer; +use tvix_store::proto::directory_service_client::DirectoryServiceClient; +use tvix_store::proto::directory_service_server::DirectoryServiceServer; +use tvix_store::proto::node::Node; +use tvix_store::proto::path_info_service_client::PathInfoServiceClient; +use tvix_store::proto::path_info_service_server::PathInfoServiceServer; +use tvix_store::proto::GRPCBlobServiceWrapper; +use tvix_store::proto::GRPCDirectoryServiceWrapper; +use tvix_store::proto::GRPCPathInfoServiceWrapper; +use tvix_store::TvixStoreIO; + +#[cfg(feature = "reflection")] +use tvix_store::proto::FILE_DESCRIPTOR_SET; + +use clap::Parser; +use tonic::{transport::Server, Result}; +use tracing::{info, Level}; + +#[derive(Parser)] +#[command(author, version, about, long_about = None)] +struct Cli { + /// Whether to log in JSON + #[arg(long)] + json: bool, + + #[arg(long)] + log_level: Option<Level>, + + #[command(subcommand)] + command: Commands, +} + +#[derive(Subcommand)] +enum Commands { + /// Runs the tvix-store daemon. 
+ Daemon { + #[arg(long, short = 'l')] + listen_address: Option<String>, + }, + /// Imports a list of paths into the store (not using the daemon) + Import { + #[clap(value_name = "PATH")] + paths: Vec<PathBuf>, + }, +} + +#[tokio::main] +async fn main() -> Result<(), Box<dyn std::error::Error>> { + let cli = Cli::parse(); + + // configure log settings + let level = cli.log_level.unwrap_or(Level::INFO); + + let subscriber = tracing_subscriber::registry() + .with(if cli.json { + Some( + tracing_subscriber::fmt::Layer::new() + .with_writer(io::stdout.with_max_level(level)) + .json(), + ) + } else { + None + }) + .with(if !cli.json { + Some( + tracing_subscriber::fmt::Layer::new() + .with_writer(io::stdout.with_max_level(level)) + .pretty(), + ) + } else { + None + }); + + tracing::subscriber::set_global_default(subscriber).expect("Unable to set global subscriber"); + + match cli.command { + Commands::Daemon { listen_address } => { + // initialize stores + let blob_service = SledBlobService::new("blobs.sled".into())?; + let directory_service = SledDirectoryService::new("directories.sled".into())?; + let path_info_service = SledPathInfoService::new("pathinfo.sled".into())?; + + let listen_address = listen_address + .unwrap_or_else(|| "[::]:8000".to_string()) + .parse() + .unwrap(); + + let mut server = Server::builder(); + + let nar_calculation_service = NonCachingNARCalculationService::new( + blob_service.clone(), + directory_service.clone(), + ); + + #[allow(unused_mut)] + let mut router = server + .add_service(BlobServiceServer::new(GRPCBlobServiceWrapper::from( + blob_service, + ))) + .add_service(DirectoryServiceServer::new( + GRPCDirectoryServiceWrapper::from(directory_service), + )) + .add_service(PathInfoServiceServer::new(GRPCPathInfoServiceWrapper::new( + path_info_service, + nar_calculation_service, + ))); + + #[cfg(feature = "reflection")] + { + let reflection_svc = tonic_reflection::server::Builder::configure() + .register_encoded_file_descriptor_set(FILE_DESCRIPTOR_SET) + .build()?; + router = router.add_service(reflection_svc); + } + + info!("tvix-store listening on {}", listen_address); + + router.serve(listen_address).await?; + } + Commands::Import { paths } => { + let blob_service = GRPCBlobService::from_client( + BlobServiceClient::connect("http://[::1]:8000").await?, + ); + let directory_service = GRPCDirectoryService::from_client( + DirectoryServiceClient::connect("http://[::1]:8000").await?, + ); + let path_info_service_client = + PathInfoServiceClient::connect("http://[::1]:8000").await?; + let path_info_service = + GRPCPathInfoService::from_client(path_info_service_client.clone()); + let nar_calculation_service = + GRPCNARCalculationService::from_client(path_info_service_client); + + let io = Arc::new(TvixStoreIO::new( + blob_service, + directory_service, + path_info_service, + nar_calculation_service, + )); + + let tasks = paths + .iter() + .map(|path| { + let io_move = io.clone(); + let path = path.clone(); + let task: tokio::task::JoinHandle<Result<(), io::Error>> = + tokio::task::spawn_blocking(move || { + let path_info = io_move.import_path_with_pathinfo(&path)?; + print_node(&path_info.node.unwrap().node.unwrap(), &path); + Ok(()) + }); + task + }) + .collect::<Vec<tokio::task::JoinHandle<Result<(), io::Error>>>>(); + + try_join_all(tasks).await?; + } + }; + Ok(()) +} + +fn print_node(node: &Node, path: &Path) { + match node { + Node::Directory(directory_node) => { + info!( + path = ?path, + name = directory_node.name, + digest = 
BASE64.encode(&directory_node.digest), + "import successful", + ) + } + Node::File(file_node) => { + info!( + path = ?path, + name = file_node.name, + digest = BASE64.encode(&file_node.digest), + "import successful" + ) + } + Node::Symlink(symlink_node) => { + info!( + path = ?path, + name = symlink_node.name, + target = symlink_node.target, + "import successful" + ) + } + } +} diff --git a/tvix/store/src/blobservice/grpc.rs b/tvix/store/src/blobservice/grpc.rs new file mode 100644 index 000000000000..0b08fbf46ad9 --- /dev/null +++ b/tvix/store/src/blobservice/grpc.rs @@ -0,0 +1,217 @@ +use super::{BlobService, BlobWriter}; +use crate::{proto, B3Digest}; +use futures::sink::{SinkExt, SinkMapErr}; +use std::{collections::VecDeque, io}; +use tokio::task::JoinHandle; +use tokio_stream::{wrappers::ReceiverStream, StreamExt}; +use tokio_util::{ + io::{CopyToBytes, SinkWriter, SyncIoBridge}, + sync::{PollSendError, PollSender}, +}; +use tonic::{transport::Channel, Code, Status, Streaming}; +use tracing::instrument; + +/// Connects to a (remote) tvix-store BlobService over gRPC. +#[derive(Clone)] +pub struct GRPCBlobService { + /// A handle into the active tokio runtime. Necessary to spawn tasks. + tokio_handle: tokio::runtime::Handle, + + /// The internal reference to a gRPC client. + /// Cloning it is cheap, and it internally handles concurrent requests. + grpc_client: proto::blob_service_client::BlobServiceClient<Channel>, +} + +impl GRPCBlobService { + /// construct a [GRPCBlobService] from a [proto::blob_service_client::BlobServiceClient<Channel>], + /// and a [tokio::runtime::Handle]. + pub fn new( + grpc_client: proto::blob_service_client::BlobServiceClient<Channel>, + tokio_handle: tokio::runtime::Handle, + ) -> Self { + Self { + tokio_handle, + grpc_client, + } + } + /// construct a [GRPCBlobService] from a [proto::blob_service_client::BlobServiceClient<Channel>]. + /// panics if called outside the context of a tokio runtime. + pub fn from_client( + grpc_client: proto::blob_service_client::BlobServiceClient<Channel>, + ) -> Self { + Self { + tokio_handle: tokio::runtime::Handle::current(), + grpc_client, + } + } +} + +impl BlobService for GRPCBlobService { + type BlobReader = Box<dyn io::Read + Send>; + type BlobWriter = GRPCBlobWriter; + + #[instrument(skip(self, digest), fields(blob.digest=%digest))] + fn has(&self, digest: &B3Digest) -> Result<bool, crate::Error> { + // Get a new handle to the gRPC client, and copy the digest. + let mut grpc_client = self.grpc_client.clone(); + let digest = digest.clone(); + + let task: tokio::task::JoinHandle<Result<_, Status>> = + self.tokio_handle.spawn(async move { + Ok(grpc_client + .stat(proto::StatBlobRequest { + digest: digest.to_vec(), + ..Default::default() + }) + .await? + .into_inner()) + }); + + match self.tokio_handle.block_on(task)? { + Ok(_blob_meta) => Ok(true), + Err(e) if e.code() == Code::NotFound => Ok(false), + Err(e) => Err(crate::Error::StorageError(e.to_string())), + } + } + + // On success, this returns a Ok(Some(io::Read)), which can be used to read + // the contents of the Blob, identified by the digest. + fn open_read(&self, digest: &B3Digest) -> Result<Option<Self::BlobReader>, crate::Error> { + // Get a new handle to the gRPC client, and copy the digest. + let mut grpc_client = self.grpc_client.clone(); + let digest = digest.clone(); + + // Construct the task that'll send out the request and return the stream + // the gRPC client should use to send [proto::BlobChunk], or an error if + // the blob doesn't exist. 
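+        // (Note: the server is the one sending here - awaiting the call below
+        // only waits for the response headers; the individual [proto::BlobChunk]
+        // messages are pulled from the returned Streaming as the reader
+        // constructed further down is consumed.)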
+ let task: tokio::task::JoinHandle<Result<Streaming<proto::BlobChunk>, Status>> = + self.tokio_handle.spawn(async move { + let stream = grpc_client + .read(proto::ReadBlobRequest { + digest: digest.to_vec(), + }) + .await? + .into_inner(); + + Ok(stream) + }); + + // This runs the task to completion, which on success will return a stream. + // On reading from it, we receive individual [proto::BlobChunk], so we + // massage this to a stream of bytes, + // then create an [AsyncRead], which we'll turn into a [io::Read], + // that's returned from the function. + match self.tokio_handle.block_on(task)? { + Ok(stream) => { + // map the stream of proto::BlobChunk to bytes. + let data_stream = stream.map(|x| { + x.map(|x| VecDeque::from(x.data)) + .map_err(|e| std::io::Error::new(std::io::ErrorKind::InvalidInput, e)) + }); + + // Use StreamReader::new to convert to an AsyncRead. + let data_reader = tokio_util::io::StreamReader::new(data_stream); + + // Use SyncIoBridge to turn it into a sync Read. + let sync_reader = tokio_util::io::SyncIoBridge::new(data_reader); + Ok(Some(Box::new(sync_reader))) + } + Err(e) if e.code() == Code::NotFound => Ok(None), + Err(e) => Err(crate::Error::StorageError(e.to_string())), + } + } + + /// Returns a [Self::BlobWriter], that'll internally wrap each write in a + // [proto::BlobChunk] and which is passed to the + fn open_write(&self) -> Result<Self::BlobWriter, crate::Error> { + let mut grpc_client = self.grpc_client.clone(); + + // set up an mpsc channel passing around Bytes. + let (tx, rx) = tokio::sync::mpsc::channel::<bytes::Bytes>(10); + + // bytes arriving on the RX side are wrapped inside a + // [proto::BlobChunk], and a [ReceiverStream] is constructed. + let blobchunk_stream = + ReceiverStream::new(rx).map(|x| proto::BlobChunk { data: x.to_vec() }); + + // That receiver stream is used as a stream in the gRPC BlobService.put rpc call. + let task: tokio::task::JoinHandle<Result<_, Status>> = self + .tokio_handle + .spawn(async move { Ok(grpc_client.put(blobchunk_stream).await?.into_inner()) }); + + // The tx part of the channel is converted to a sink of byte chunks. + + // We need to make this a function pointer, not a closure. + fn convert_error(_: PollSendError<bytes::Bytes>) -> io::Error { + io::Error::from(io::ErrorKind::BrokenPipe) + } + + let sink = PollSender::new(tx) + .sink_map_err(convert_error as fn(PollSendError<bytes::Bytes>) -> io::Error); + // We need to explicitly cast here, otherwise rustc does error with "expected fn pointer, found fn item" + + // … which is turned into an [tokio::io::AsyncWrite]. + let async_writer = SinkWriter::new(CopyToBytes::new(sink)); + // … which is then turned into a [io::Write]. + let writer = SyncIoBridge::new(async_writer); + + Ok(GRPCBlobWriter { + tokio_handle: self.tokio_handle.clone(), // TODO: is the clone() ok here? + task, + inner_writer: writer, + }) + } +} + +type BridgedWriter = SyncIoBridge< + SinkWriter< + CopyToBytes< + SinkMapErr<PollSender<bytes::Bytes>, fn(PollSendError<bytes::Bytes>) -> io::Error>, + >, + >, +>; + +pub struct GRPCBlobWriter { + /// A handle into the active tokio runtime. Necessary to block on the task + /// containing the put request. + tokio_handle: tokio::runtime::Handle, + + /// The task containing the put request. + task: JoinHandle<Result<proto::PutBlobResponse, Status>>, + + /// The inner Writer. 
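+    /// Writes pass through it into the mpsc channel that feeds the streaming
+    /// put request set up in open_write above.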
+ inner_writer: BridgedWriter, +} + +impl BlobWriter for GRPCBlobWriter { + fn close(mut self) -> Result<B3Digest, crate::Error> { + // invoke shutdown, so the inner writer closes its internal tx side of + // the channel. + self.inner_writer + .shutdown() + .map_err(|e| crate::Error::StorageError(e.to_string()))?; + + // block on the RPC call to return. + // This ensures all chunks are sent out, and have been received by the + // backend. + match self.tokio_handle.block_on(self.task)? { + Ok(resp) => { + // return the digest from the response. + B3Digest::from_vec(resp.digest).map_err(|_| { + crate::Error::StorageError("invalid root digest length in response".to_string()) + }) + } + Err(e) => Err(crate::Error::StorageError(e.to_string())), + } + } +} + +impl io::Write for GRPCBlobWriter { + fn write(&mut self, buf: &[u8]) -> io::Result<usize> { + self.inner_writer.write(buf) + } + + fn flush(&mut self) -> io::Result<()> { + self.inner_writer.flush() + } +} diff --git a/tvix/store/src/blobservice/memory.rs b/tvix/store/src/blobservice/memory.rs new file mode 100644 index 000000000000..1ee59d108743 --- /dev/null +++ b/tvix/store/src/blobservice/memory.rs @@ -0,0 +1,76 @@ +use std::io::Cursor; +use std::{ + collections::HashMap, + sync::{Arc, RwLock}, +}; +use tracing::{instrument, warn}; + +use super::{BlobService, BlobWriter}; +use crate::{B3Digest, Error}; + +#[derive(Clone, Default)] +pub struct MemoryBlobService { + db: Arc<RwLock<HashMap<B3Digest, Vec<u8>>>>, +} + +impl BlobService for MemoryBlobService { + type BlobReader = Cursor<Vec<u8>>; + type BlobWriter = MemoryBlobWriter; + + #[instrument(skip(self, digest), fields(blob.digest=%digest))] + fn has(&self, digest: &B3Digest) -> Result<bool, Error> { + let db = self.db.read().unwrap(); + Ok(db.contains_key(digest)) + } + + fn open_read(&self, digest: &B3Digest) -> Result<Option<Self::BlobReader>, Error> { + let db = self.db.read().unwrap(); + + Ok(db.get(digest).map(|x| Cursor::new(x.clone()))) + } + + #[instrument(skip(self))] + fn open_write(&self) -> Result<Self::BlobWriter, Error> { + Ok(MemoryBlobWriter::new(self.db.clone())) + } +} + +pub struct MemoryBlobWriter { + db: Arc<RwLock<HashMap<B3Digest, Vec<u8>>>>, + + buf: Vec<u8>, +} + +impl MemoryBlobWriter { + fn new(db: Arc<RwLock<HashMap<B3Digest, Vec<u8>>>>) -> Self { + Self { + buf: Vec::new(), + db, + } + } +} +impl std::io::Write for MemoryBlobWriter { + fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> { + self.buf.write(buf) + } + + fn flush(&mut self) -> std::io::Result<()> { + self.buf.flush() + } +} + +impl BlobWriter for MemoryBlobWriter { + fn close(self) -> Result<B3Digest, Error> { + // in this memory implementation, we don't actually bother hashing + // incrementally while writing, but do it at the end. + let mut hasher = blake3::Hasher::new(); + hasher.update(&self.buf); + let digest = B3Digest::from_vec(hasher.finalize().as_bytes().to_vec()).unwrap(); + + // open the database for writing. + let mut db = self.db.write()?; + db.insert(digest.clone(), self.buf); + + Ok(digest) + } +} diff --git a/tvix/store/src/blobservice/mod.rs b/tvix/store/src/blobservice/mod.rs new file mode 100644 index 000000000000..c5a2de124656 --- /dev/null +++ b/tvix/store/src/blobservice/mod.rs @@ -0,0 +1,41 @@ +use std::io; + +use crate::{B3Digest, Error}; + +mod grpc; +mod memory; +mod sled; + +pub use self::grpc::GRPCBlobService; +pub use self::memory::MemoryBlobService; +pub use self::sled::SledBlobService; + +/// The base trait all BlobService services need to implement. 
+/// It provides functions to check whether a given blob exists, +/// a way to get a [io::Read] to a blob, and a method to initiate writing a new +/// Blob, which returns a [BlobWriter], that can be used +pub trait BlobService { + type BlobReader: io::Read + Send + std::marker::Unpin; + type BlobWriter: BlobWriter + Send; + + /// Check if the service has the blob, by its content hash. + fn has(&self, digest: &B3Digest) -> Result<bool, Error>; + + /// Request a blob from the store, by its content hash. Returns a Option<BlobReader>. + fn open_read(&self, digest: &B3Digest) -> Result<Option<Self::BlobReader>, Error>; + + /// Insert a new blob into the store. Returns a [BlobWriter], which + /// implements [io::Write] and a [BlobWriter::close]. + /// TODO: is there any reason we want this to be a Result<>, and not just T? + fn open_write(&self) -> Result<Self::BlobWriter, Error>; +} + +/// A [io::Write] that you need to close() afterwards, and get back the digest +/// of the written blob. +pub trait BlobWriter: io::Write { + /// Signal there's no more data to be written, and return the digest of the + /// contents written. + /// + /// This consumes self, so it's not possible to close twice. + fn close(self) -> Result<B3Digest, Error>; +} diff --git a/tvix/store/src/blobservice/sled.rs b/tvix/store/src/blobservice/sled.rs new file mode 100644 index 000000000000..2b090335344d --- /dev/null +++ b/tvix/store/src/blobservice/sled.rs @@ -0,0 +1,94 @@ +use super::{BlobService, BlobWriter}; +use crate::{B3Digest, Error}; +use std::{ + io::{self, Cursor}, + path::PathBuf, +}; +use tracing::instrument; + +#[derive(Clone)] +pub struct SledBlobService { + db: sled::Db, +} + +impl SledBlobService { + pub fn new(p: PathBuf) -> Result<Self, sled::Error> { + let config = sled::Config::default().use_compression(true).path(p); + let db = config.open()?; + + Ok(Self { db }) + } + + pub fn new_temporary() -> Result<Self, sled::Error> { + let config = sled::Config::default().temporary(true); + let db = config.open()?; + + Ok(Self { db }) + } +} + +impl BlobService for SledBlobService { + type BlobReader = Cursor<Vec<u8>>; + type BlobWriter = SledBlobWriter; + + #[instrument(skip(self), fields(blob.digest=%digest))] + fn has(&self, digest: &B3Digest) -> Result<bool, Error> { + match self.db.contains_key(digest.to_vec()) { + Ok(has) => Ok(has), + Err(e) => Err(Error::StorageError(e.to_string())), + } + } + + #[instrument(skip(self), fields(blob.digest=%digest))] + fn open_read(&self, digest: &B3Digest) -> Result<Option<Self::BlobReader>, Error> { + match self.db.get(digest.to_vec()) { + Ok(None) => Ok(None), + Ok(Some(data)) => Ok(Some(Cursor::new(data[..].to_vec()))), + Err(e) => Err(Error::StorageError(e.to_string())), + } + } + + #[instrument(skip(self))] + fn open_write(&self) -> Result<Self::BlobWriter, Error> { + Ok(SledBlobWriter::new(self.db.clone())) + } +} + +pub struct SledBlobWriter { + db: sled::Db, + buf: Vec<u8>, + hasher: blake3::Hasher, +} + +impl SledBlobWriter { + pub fn new(db: sled::Db) -> Self { + Self { + buf: Vec::default(), + db, + hasher: blake3::Hasher::new(), + } + } +} + +impl io::Write for SledBlobWriter { + fn write(&mut self, buf: &[u8]) -> io::Result<usize> { + let bytes_written = self.buf.write(buf)?; + self.hasher.write(&buf[..bytes_written]) + } + + fn flush(&mut self) -> io::Result<()> { + self.buf.flush() + } +} + +impl BlobWriter for SledBlobWriter { + fn close(self) -> Result<B3Digest, Error> { + let digest = self.hasher.finalize(); + self.db + .insert(digest.as_bytes(), 
self.buf) + .map_err(|e| Error::StorageError(format!("unable to insert blob: {}", e)))?; + + // We know self.hasher is doing blake3 hashing, so this won't fail. + Ok(B3Digest::from_vec(digest.as_bytes().to_vec()).unwrap()) + } +} diff --git a/tvix/store/src/digests.rs b/tvix/store/src/digests.rs new file mode 100644 index 000000000000..441a059ee0b6 --- /dev/null +++ b/tvix/store/src/digests.rs @@ -0,0 +1,49 @@ +use data_encoding::BASE64; +use thiserror::Error; + +// FUTUREWORK: make generic + +#[derive(PartialEq, Eq, Hash, Debug)] +pub struct B3Digest(Vec<u8>); + +// TODO: allow converting these errors to crate::Error +#[derive(Error, Debug)] +pub enum Error { + #[error("invalid digest length: {0}")] + InvalidDigestLen(usize), +} + +impl B3Digest { + // constructs a [B3Digest] from a [Vec<u8>]. + // Returns an error if the digest has the wrong length. + pub fn from_vec(value: Vec<u8>) -> Result<Self, Error> { + if value.len() != 32 { + Err(Error::InvalidDigestLen(value.len())) + } else { + Ok(Self(value)) + } + } + + // returns a copy of the inner [Vec<u8>]. + pub fn to_vec(&self) -> Vec<u8> { + self.0.to_vec() + } +} + +impl From<&[u8; 32]> for B3Digest { + fn from(value: &[u8; 32]) -> Self { + Self(value.to_vec()) + } +} + +impl Clone for B3Digest { + fn clone(&self) -> Self { + Self(self.0.to_owned()) + } +} + +impl std::fmt::Display for B3Digest { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "b3:{}", BASE64.encode(self.0.as_slice())) + } +} diff --git a/tvix/store/src/directoryservice/grpc.rs b/tvix/store/src/directoryservice/grpc.rs new file mode 100644 index 000000000000..1b33572cf7de --- /dev/null +++ b/tvix/store/src/directoryservice/grpc.rs @@ -0,0 +1,532 @@ +use std::collections::HashSet; + +use super::{DirectoryPutter, DirectoryService}; +use crate::proto::{self, get_directory_request::ByWhat}; +use crate::{B3Digest, Error}; +use tokio::sync::mpsc::UnboundedSender; +use tokio_stream::wrappers::UnboundedReceiverStream; +use tonic::{transport::Channel, Status}; +use tonic::{Code, Streaming}; +use tracing::{instrument, warn}; + +/// Connects to a (remote) tvix-store DirectoryService over gRPC. +#[derive(Clone)] +pub struct GRPCDirectoryService { + /// A handle into the active tokio runtime. Necessary to spawn tasks. + tokio_handle: tokio::runtime::Handle, + + /// The internal reference to a gRPC client. + /// Cloning it is cheap, and it internally handles concurrent requests. + grpc_client: proto::directory_service_client::DirectoryServiceClient<Channel>, +} + +impl GRPCDirectoryService { + /// Construct a new [GRPCDirectoryService], by passing a handle to the + /// tokio runtime, and a gRPC client. + pub fn new( + tokio_handle: tokio::runtime::Handle, + grpc_client: proto::directory_service_client::DirectoryServiceClient<Channel>, + ) -> Self { + Self { + tokio_handle, + grpc_client, + } + } + + /// construct a [GRPCDirectoryService] from a [proto::blob_service_client::BlobServiceClient<Channel>]. + /// panics if called outside the context of a tokio runtime. + pub fn from_client( + grpc_client: proto::directory_service_client::DirectoryServiceClient<Channel>, + ) -> Self { + Self { + tokio_handle: tokio::runtime::Handle::current(), + grpc_client, + } + } +} + +impl DirectoryService for GRPCDirectoryService { + type DirectoriesIterator = StreamIterator; + + fn get(&self, digest: &B3Digest) -> Result<Option<crate::proto::Directory>, crate::Error> { + // Get a new handle to the gRPC client, and copy the digest. 
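+        // Both are moved into the future spawned on the runtime below, and
+        // this (synchronous) method then blocks on the join handle to get the
+        // first message of the response stream back.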
+ let mut grpc_client = self.grpc_client.clone(); + + let digest_as_vec = digest.to_vec(); + let task = self.tokio_handle.spawn(async move { + let mut s = grpc_client + .get(proto::GetDirectoryRequest { + recursive: false, + by_what: Some(ByWhat::Digest(digest_as_vec)), + }) + .await? + .into_inner(); + + // Retrieve the first message only, then close the stream (we set recursive to false) + s.message().await + }); + + let digest = digest.clone(); + match self.tokio_handle.block_on(task)? { + Ok(Some(directory)) => { + // Validate the retrieved Directory indeed has the + // digest we expect it to have, to detect corruptions. + let actual_digest = directory.digest(); + if actual_digest != digest { + Err(crate::Error::StorageError(format!( + "requested directory with digest {}, but got {}", + digest, actual_digest + ))) + } else if let Err(e) = directory.validate() { + // Validate the Directory itself is valid. + warn!("directory failed validation: {}", e.to_string()); + Err(crate::Error::StorageError(format!( + "directory {} failed validation: {}", + digest, e, + ))) + } else { + Ok(Some(directory)) + } + } + Ok(None) => Ok(None), + Err(e) if e.code() == Code::NotFound => Ok(None), + Err(e) => Err(crate::Error::StorageError(e.to_string())), + } + } + + fn put(&self, directory: crate::proto::Directory) -> Result<B3Digest, crate::Error> { + let mut grpc_client = self.grpc_client.clone(); + + let task = self + .tokio_handle + .spawn(async move { grpc_client.put(tokio_stream::iter(vec![directory])).await }); + + match self.tokio_handle.block_on(task)? { + Ok(put_directory_resp) => Ok(B3Digest::from_vec( + put_directory_resp.into_inner().root_digest, + ) + .map_err(|_| { + Error::StorageError("invalid root digest length in response".to_string()) + })?), + Err(e) => Err(crate::Error::StorageError(e.to_string())), + } + } + + #[instrument(skip_all, fields(directory.digest = %root_directory_digest))] + fn get_recursive(&self, root_directory_digest: &B3Digest) -> Self::DirectoriesIterator { + let mut grpc_client = self.grpc_client.clone(); + + let root_directory_digest_as_vec = root_directory_digest.to_vec(); + let task: tokio::task::JoinHandle<Result<Streaming<proto::Directory>, Status>> = + self.tokio_handle.spawn(async move { + let s = grpc_client + .get(proto::GetDirectoryRequest { + recursive: true, + by_what: Some(ByWhat::Digest(root_directory_digest_as_vec)), + }) + .await? + .into_inner(); + + Ok(s) + }); + + let stream = self.tokio_handle.block_on(task).unwrap().unwrap(); + + StreamIterator::new( + self.tokio_handle.clone(), + root_directory_digest.clone(), + stream, + ) + } + + type DirectoryPutter = GRPCPutter; + + #[instrument(skip_all)] + fn put_multiple_start(&self) -> Self::DirectoryPutter + where + Self: Clone, + { + let mut grpc_client = self.grpc_client.clone(); + + let (tx, rx) = tokio::sync::mpsc::unbounded_channel(); + + let task: tokio::task::JoinHandle<Result<proto::PutDirectoryResponse, Status>> = + self.tokio_handle.spawn(async move { + let s = grpc_client + .put(UnboundedReceiverStream::new(rx)) + .await? + .into_inner(); + + Ok(s) + }); + + GRPCPutter::new(self.tokio_handle.clone(), tx, task) + } +} + +pub struct StreamIterator { + /// A handle into the active tokio runtime. Necessary to run futures to completion. 
+ tokio_handle: tokio::runtime::Handle, + // A stream of [proto::Directory] + stream: Streaming<proto::Directory>, + // The Directory digests we received so far + received_directory_digests: HashSet<B3Digest>, + // The Directory digests we're still expecting to get sent. + expected_directory_digests: HashSet<B3Digest>, +} + +impl StreamIterator { + pub fn new( + tokio_handle: tokio::runtime::Handle, + root_digest: B3Digest, + stream: Streaming<proto::Directory>, + ) -> Self { + Self { + tokio_handle, + stream, + received_directory_digests: HashSet::new(), + expected_directory_digests: HashSet::from([root_digest]), + } + } +} + +impl Iterator for StreamIterator { + type Item = Result<proto::Directory, crate::Error>; + + fn next(&mut self) -> Option<Self::Item> { + match self.tokio_handle.block_on(self.stream.message()) { + Ok(ok) => match ok { + Some(directory) => { + // validate the directory itself. + if let Err(e) = directory.validate() { + return Some(Err(crate::Error::StorageError(format!( + "directory {} failed validation: {}", + directory.digest(), + e, + )))); + } + // validate we actually expected that directory, and move it from expected to received. + let directory_digest = directory.digest(); + let was_expected = self.expected_directory_digests.remove(&directory_digest); + if !was_expected { + // FUTUREWORK: dumb clients might send the same stuff twice. + // as a fallback, we might want to tolerate receiving + // it if it's in received_directory_digests (as that + // means it once was in expected_directory_digests) + return Some(Err(crate::Error::StorageError(format!( + "received unexpected directory {}", + directory_digest + )))); + } + self.received_directory_digests.insert(directory_digest); + + // register all children in expected_directory_digests. + for child_directory in &directory.directories { + // We ran validate() above, so we know these digests must be correct. + let child_directory_digest = + B3Digest::from_vec(child_directory.digest.clone()).unwrap(); + + self.expected_directory_digests + .insert(child_directory_digest); + } + + Some(Ok(directory)) + } + None => { + // If we were still expecting something, that's an error. + if !self.expected_directory_digests.is_empty() { + Some(Err(crate::Error::StorageError(format!( + "still expected {} directories, but got premature end of stream", + self.expected_directory_digests.len(), + )))) + } else { + None + } + } + }, + Err(e) => Some(Err(crate::Error::StorageError(e.to_string()))), + } + } +} + +/// Allows uploading multiple Directory messages in the same gRPC stream. +pub struct GRPCPutter { + /// A handle into the active tokio runtime. Necessary to spawn tasks. + tokio_handle: tokio::runtime::Handle, + + /// Data about the current request - a handle to the task, and the tx part + /// of the channel. + /// The tx part of the pipe is used to send [proto::Directory] to the ongoing request. + /// The task will yield a [proto::PutDirectoryResponse] once the stream is closed. + #[allow(clippy::type_complexity)] // lol + rq: Option<( + tokio::task::JoinHandle<Result<proto::PutDirectoryResponse, Status>>, + UnboundedSender<proto::Directory>, + )>, +} + +impl GRPCPutter { + pub fn new( + tokio_handle: tokio::runtime::Handle, + directory_sender: UnboundedSender<proto::Directory>, + task: tokio::task::JoinHandle<Result<proto::PutDirectoryResponse, Status>>, + ) -> Self { + Self { + tokio_handle, + rq: Some((task, directory_sender)), + } + } + + #[allow(dead_code)] + // allows checking if the tx part of the channel is closed. 
+ fn is_closed(&self) -> bool { + match self.rq { + None => true, + Some((_, ref directory_sender)) => directory_sender.is_closed(), + } + } +} + +impl DirectoryPutter for GRPCPutter { + fn put(&mut self, directory: proto::Directory) -> Result<(), crate::Error> { + match self.rq { + // If we're not already closed, send the directory to directory_sender. + Some((_, ref directory_sender)) => { + if directory_sender.send(directory).is_err() { + // If the channel has been prematurely closed, invoke close (so we can peek at the error code) + // That error code is much more helpful, because it + // contains the error message from the server. + self.close()?; + } + Ok(()) + } + // If self.close() was already called, we can't put again. + None => Err(Error::StorageError( + "DirectoryPutter already closed".to_string(), + )), + } + } + + /// Closes the stream for sending, and returns the value + fn close(&mut self) -> Result<B3Digest, crate::Error> { + // get self.rq, and replace it with None. + // This ensures we can only close it once. + match std::mem::take(&mut self.rq) { + None => Err(Error::StorageError("already closed".to_string())), + Some((task, directory_sender)) => { + // close directory_sender, so blocking on task will finish. + drop(directory_sender); + + let root_digest = self + .tokio_handle + .block_on(task)? + .map_err(|e| Error::StorageError(e.to_string()))? + .root_digest; + + B3Digest::from_vec(root_digest).map_err(|_| { + Error::StorageError("invalid root digest length in response".to_string()) + }) + } + } + } +} + +#[cfg(test)] +mod tests { + use core::time; + use std::thread; + + use tempfile::TempDir; + use tokio::net::{UnixListener, UnixStream}; + use tokio_stream::wrappers::UnixListenerStream; + use tonic::transport::{Endpoint, Server, Uri}; + + use crate::{ + directoryservice::{DirectoryPutter, DirectoryService}, + proto, + proto::{directory_service_server::DirectoryServiceServer, GRPCDirectoryServiceWrapper}, + tests::{ + fixtures::{DIRECTORY_A, DIRECTORY_B}, + utils::gen_directory_service, + }, + }; + + #[test] + fn test() -> anyhow::Result<()> { + let tmpdir = TempDir::new().unwrap(); + let socket_path = tmpdir.path().join("socket"); + + // Spin up a server, in a thread far away, which spawns its own tokio runtime, + // and blocks on the task. + let socket_path_clone = socket_path.clone(); + thread::spawn(move || { + // Create the runtime + let rt = tokio::runtime::Runtime::new().unwrap(); + // Get a handle from this runtime + let handle = rt.handle(); + + let task = handle.spawn(async { + let uds = UnixListener::bind(socket_path_clone).unwrap(); + let uds_stream = UnixListenerStream::new(uds); + + // spin up a new DirectoryService + let mut server = Server::builder(); + let router = server.add_service(DirectoryServiceServer::new( + GRPCDirectoryServiceWrapper::from(gen_directory_service()), + )); + router.serve_with_incoming(uds_stream).await + }); + + handle.block_on(task) + }); + + // set up the local client runtime. This is similar to what the [tokio:test] macro desugars to. 
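As the comment above notes, `#[tokio::test]` roughly desugars to an ordinary `#[test]` that builds a current-thread runtime and drives the async body with `block_on`; a minimal sketch of that equivalence (illustrative, not the exact macro expansion):

    #[test]
    fn roughly_what_tokio_test_expands_to() {
        tokio::runtime::Builder::new_current_thread()
            .enable_all()
            .build()
            .unwrap()
            .block_on(async {
                // async test body goes here
            });
    }
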
+ let tester_runtime = tokio::runtime::Builder::new_current_thread() + .enable_all() + .build() + .unwrap(); + + // wait for the socket to be created + { + let mut socket_created = false; + for _try in 1..20 { + if socket_path.exists() { + socket_created = true; + break; + } + std::thread::sleep(time::Duration::from_millis(20)) + } + + assert!( + socket_created, + "expected socket path to eventually get created, but never happened" + ); + } + + let task = tester_runtime.spawn_blocking(move || { + // Create a channel, connecting to the uds at socket_path. + // The URI is unused. + let channel = Endpoint::try_from("http://[::]:50051") + .unwrap() + .connect_with_connector_lazy(tower::service_fn(move |_: Uri| { + UnixStream::connect(socket_path.clone()) + })); + + let grpc_client = proto::directory_service_client::DirectoryServiceClient::new(channel); + + // create the GrpcDirectoryService, using the tester_runtime. + let directory_service = + super::GRPCDirectoryService::new(tokio::runtime::Handle::current(), grpc_client); + + // try to get DIRECTORY_A should return Ok(None) + assert_eq!( + None, + directory_service + .get(&DIRECTORY_A.digest()) + .expect("must not fail") + ); + + // Now upload it + assert_eq!( + DIRECTORY_A.digest(), + directory_service + .put(DIRECTORY_A.clone()) + .expect("must succeed") + ); + + // And retrieve it, compare for equality. + assert_eq!( + DIRECTORY_A.clone(), + directory_service + .get(&DIRECTORY_A.digest()) + .expect("must succeed") + .expect("must be some") + ); + + // Putting DIRECTORY_B alone should fail, because it refers to DIRECTORY_A. + directory_service + .put(DIRECTORY_B.clone()) + .expect_err("must fail"); + + // Putting DIRECTORY_B in a put_multiple will succeed, but the close + // will always fail. + { + let mut handle = directory_service.put_multiple_start(); + handle.put(DIRECTORY_B.clone()).expect("must succeed"); + handle.close().expect_err("must fail"); + } + + // Uploading A and then B should succeed, and closing should return the digest of B. + let mut handle = directory_service.put_multiple_start(); + handle.put(DIRECTORY_A.clone()).expect("must succeed"); + handle.put(DIRECTORY_B.clone()).expect("must succeed"); + let digest = handle.close().expect("must succeed"); + assert_eq!(DIRECTORY_B.digest(), digest); + + // Now try to retrieve the closure of DIRECTORY_B, which should return B and then A. + let mut directories_it = directory_service.get_recursive(&DIRECTORY_B.digest()); + assert_eq!( + DIRECTORY_B.clone(), + directories_it + .next() + .expect("must be some") + .expect("must succeed") + ); + assert_eq!( + DIRECTORY_A.clone(), + directories_it + .next() + .expect("must be some") + .expect("must succeed") + ); + + // Uploading B and then A should fail, because B refers to A, which + // hasn't been uploaded yet. + // However, the client can burst, so we might not have received the + // error back from the server. + { + let mut handle = directory_service.put_multiple_start(); + // sending out B will always be fine + handle.put(DIRECTORY_B.clone()).expect("must succeed"); + + // whether we will be able to put A as well depends on whether we + // already received the error about B. 
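To make the race described above concrete: `GRPCPutter` feeds directories to the request through an unbounded mpsc channel, and a send only fails once the receiving side has gone away, so a server-side rejection may surface only at `close()`. A self-contained illustration of that channel property (not tvix code):

    fn main() {
        let (tx, rx) = tokio::sync::mpsc::unbounded_channel::<u32>();

        // While the receiver is still alive, send() succeeds even if nothing
        // has consumed the messages yet ("bursting").
        assert!(tx.send(1).is_ok());

        // Only once the receiving side is gone does send() report an error;
        // this is the point at which GRPCPutter::put falls back to close()
        // to surface the more helpful error from the server.
        drop(rx);
        assert!(tx.send(2).is_err());
    }
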
+ if handle.put(DIRECTORY_A.clone()).is_ok() { + // If we didn't, and this was Ok(_), … + // a subsequent close MUST fail (because it waits for the + // server) + handle.close().expect_err("must fail"); + } + } + + // Now we do the same test as before, send B, then A, but wait + // sufficiently enough for the server to have s + // to close us the stream, + // and then assert that uploading anything else via the handle will fail. + { + let mut handle = directory_service.put_multiple_start(); + handle.put(DIRECTORY_B.clone()).expect("must succeed"); + + let mut is_closed = false; + for _try in 1..1000 { + if handle.is_closed() { + is_closed = true; + break; + } + std::thread::sleep(time::Duration::from_millis(10)) + } + + assert!( + is_closed, + "expected channel to eventually close, but never happened" + ); + + handle.put(DIRECTORY_A.clone()).expect_err("must fail"); + } + }); + + tester_runtime.block_on(task)?; + + Ok(()) + } +} diff --git a/tvix/store/src/directoryservice/memory.rs b/tvix/store/src/directoryservice/memory.rs new file mode 100644 index 000000000000..1fd619f7c8cb --- /dev/null +++ b/tvix/store/src/directoryservice/memory.rs @@ -0,0 +1,84 @@ +use crate::{proto, B3Digest, Error}; +use std::collections::HashMap; +use std::sync::{Arc, RwLock}; +use tracing::{instrument, warn}; + +use super::utils::SimplePutter; +use super::{DirectoryService, DirectoryTraverser}; + +#[derive(Clone, Default)] +pub struct MemoryDirectoryService { + db: Arc<RwLock<HashMap<B3Digest, proto::Directory>>>, +} + +impl DirectoryService for MemoryDirectoryService { + type DirectoriesIterator = DirectoryTraverser<Self>; + + #[instrument(skip(self, digest), fields(directory.digest = %digest))] + fn get(&self, digest: &B3Digest) -> Result<Option<proto::Directory>, Error> { + let db = self.db.read()?; + + match db.get(digest) { + // The directory was not found, return + None => Ok(None), + + // The directory was found, try to parse the data as Directory message + Some(directory) => { + // Validate the retrieved Directory indeed has the + // digest we expect it to have, to detect corruptions. + let actual_digest = directory.digest(); + if actual_digest != *digest { + return Err(Error::StorageError(format!( + "requested directory with digest {}, but got {}", + digest, actual_digest + ))); + } + + // Validate the Directory itself is valid. + if let Err(e) = directory.validate() { + warn!("directory failed validation: {}", e.to_string()); + return Err(Error::StorageError(format!( + "directory {} failed validation: {}", + actual_digest, e, + ))); + } + + Ok(Some(directory.clone())) + } + } + } + + #[instrument(skip(self, directory), fields(directory.digest = %directory.digest()))] + fn put(&self, directory: proto::Directory) -> Result<B3Digest, Error> { + let digest = directory.digest(); + + // validate the directory itself. 
+ if let Err(e) = directory.validate() { + return Err(Error::InvalidRequest(format!( + "directory {} failed validation: {}", + digest, e, + ))); + } + + // store it + let mut db = self.db.write()?; + db.insert(digest.clone(), directory); + + Ok(digest) + } + + #[instrument(skip_all, fields(directory.digest = %root_directory_digest))] + fn get_recursive(&self, root_directory_digest: &B3Digest) -> Self::DirectoriesIterator { + DirectoryTraverser::with(self.clone(), root_directory_digest) + } + + type DirectoryPutter = SimplePutter<Self>; + + #[instrument(skip_all)] + fn put_multiple_start(&self) -> Self::DirectoryPutter + where + Self: Clone, + { + SimplePutter::new(self.clone()) + } +} diff --git a/tvix/store/src/directoryservice/mod.rs b/tvix/store/src/directoryservice/mod.rs new file mode 100644 index 000000000000..f387d28948f0 --- /dev/null +++ b/tvix/store/src/directoryservice/mod.rs @@ -0,0 +1,54 @@ +use crate::{proto, B3Digest, Error}; +mod grpc; +mod memory; +mod sled; +mod traverse; +mod utils; + +pub use self::grpc::GRPCDirectoryService; +pub use self::memory::MemoryDirectoryService; +pub use self::sled::SledDirectoryService; +pub use self::traverse::traverse_to; +pub use self::utils::DirectoryTraverser; + +/// The base trait all Directory services need to implement. +/// This is a simple get and put of [crate::proto::Directory], returning their +/// digest. +pub trait DirectoryService { + type DirectoriesIterator: Iterator<Item = Result<proto::Directory, Error>> + Send; + type DirectoryPutter: DirectoryPutter; + + /// Get looks up a single Directory message by its digest. + /// In case the directory is not found, Ok(None) is returned. + fn get(&self, digest: &B3Digest) -> Result<Option<proto::Directory>, Error>; + /// Get uploads a single Directory message, and returns the calculated + /// digest, or an error. + fn put(&self, directory: proto::Directory) -> Result<B3Digest, Error>; + + /// Looks up a closure of [proto::Directory]. + /// Ideally this would be a `impl Iterator<Item = Result<proto::Directory, Error>>`, + /// and we'd be able to add a default implementation for it here, but + /// we can't have that yet. + fn get_recursive(&self, root_directory_digest: &B3Digest) -> Self::DirectoriesIterator; + + /// Allows persisting a closure of [proto::Directory], which is a graph of + /// connected Directory messages. + fn put_multiple_start(&self) -> Self::DirectoryPutter; +} + +/// Provides a handle to put a closure of connected [proto::Directory] elements. +/// +/// The consumer can periodically call [put], starting from the leaves. Once +/// the root is reached, [close] can be called to retrieve the root digest (or +/// an error). +pub trait DirectoryPutter { + /// Put a individual [proto::Directory] into the store. + /// Error semantics and behaviour is up to the specific implementation of + /// this trait. + /// Due to bursting, the returned error might refer to an object previously + /// sent via `put`. + fn put(&mut self, directory: proto::Directory) -> Result<(), Error>; + + /// Close the stream, and wait for any errors. 
+ fn close(&mut self) -> Result<B3Digest, Error>; +} diff --git a/tvix/store/src/directoryservice/sled.rs b/tvix/store/src/directoryservice/sled.rs new file mode 100644 index 000000000000..e189e8acf507 --- /dev/null +++ b/tvix/store/src/directoryservice/sled.rs @@ -0,0 +1,107 @@ +use crate::proto::Directory; +use crate::{proto, B3Digest, Error}; +use prost::Message; +use std::path::PathBuf; +use tracing::{instrument, warn}; + +use super::utils::SimplePutter; +use super::{DirectoryService, DirectoryTraverser}; + +#[derive(Clone)] +pub struct SledDirectoryService { + db: sled::Db, +} + +impl SledDirectoryService { + pub fn new(p: PathBuf) -> Result<Self, sled::Error> { + let config = sled::Config::default().use_compression(true).path(p); + let db = config.open()?; + + Ok(Self { db }) + } + + pub fn new_temporary() -> Result<Self, sled::Error> { + let config = sled::Config::default().temporary(true); + let db = config.open()?; + + Ok(Self { db }) + } +} + +impl DirectoryService for SledDirectoryService { + type DirectoriesIterator = DirectoryTraverser<Self>; + + #[instrument(skip(self, digest), fields(directory.digest = %digest))] + fn get(&self, digest: &B3Digest) -> Result<Option<proto::Directory>, Error> { + match self.db.get(digest.to_vec()) { + // The directory was not found, return + Ok(None) => Ok(None), + + // The directory was found, try to parse the data as Directory message + Ok(Some(data)) => match Directory::decode(&*data) { + Ok(directory) => { + // Validate the retrieved Directory indeed has the + // digest we expect it to have, to detect corruptions. + let actual_digest = directory.digest(); + if actual_digest != *digest { + return Err(Error::StorageError(format!( + "requested directory with digest {}, but got {}", + digest, actual_digest + ))); + } + + // Validate the Directory itself is valid. + if let Err(e) = directory.validate() { + warn!("directory failed validation: {}", e.to_string()); + return Err(Error::StorageError(format!( + "directory {} failed validation: {}", + actual_digest, e, + ))); + } + + Ok(Some(directory)) + } + Err(e) => { + warn!("unable to parse directory {}: {}", digest, e); + Err(Error::StorageError(e.to_string())) + } + }, + // some storage error? + Err(e) => Err(Error::StorageError(e.to_string())), + } + } + + #[instrument(skip(self, directory), fields(directory.digest = %directory.digest()))] + fn put(&self, directory: proto::Directory) -> Result<B3Digest, Error> { + let digest = directory.digest(); + + // validate the directory itself. 
+ if let Err(e) = directory.validate() { + return Err(Error::InvalidRequest(format!( + "directory {} failed validation: {}", + digest, e, + ))); + } + // store it + let result = self.db.insert(digest.to_vec(), directory.encode_to_vec()); + if let Err(e) = result { + return Err(Error::StorageError(e.to_string())); + } + Ok(digest) + } + + #[instrument(skip_all, fields(directory.digest = %root_directory_digest))] + fn get_recursive(&self, root_directory_digest: &B3Digest) -> Self::DirectoriesIterator { + DirectoryTraverser::with(self.clone(), root_directory_digest) + } + + type DirectoryPutter = SimplePutter<Self>; + + #[instrument(skip_all)] + fn put_multiple_start(&self) -> Self::DirectoryPutter + where + Self: Clone, + { + SimplePutter::new(self.clone()) + } +} diff --git a/tvix/store/src/directoryservice/traverse.rs b/tvix/store/src/directoryservice/traverse.rs new file mode 100644 index 000000000000..8691baa8b73f --- /dev/null +++ b/tvix/store/src/directoryservice/traverse.rs @@ -0,0 +1,222 @@ +use super::DirectoryService; +use crate::{proto::NamedNode, B3Digest, Error}; +use tracing::{instrument, warn}; + +/// This traverses from a (root) node to the given (sub)path, returning the Node +/// at that path, or none, if there's nothing at that path. +/// TODO: Do we want to rewrite this in a non-recursing fashion, and use +/// [DirectoryService.get_recursive] to do less lookups? +/// Or do we consider this to be a non-issue due to store composition and local caching? +/// TODO: the name of this function (and mod) is a bit bad, because it doesn't +/// clearly distinguish it from the BFS traversers. +#[instrument(skip(directory_service))] +pub fn traverse_to<DS: DirectoryService>( + directory_service: &DS, + node: crate::proto::node::Node, + path: &std::path::Path, +) -> Result<Option<crate::proto::node::Node>, Error> { + // strip a possible `/` prefix from the path. + let path = { + if path.starts_with("/") { + path.strip_prefix("/").unwrap() + } else { + path + } + }; + + let mut it = path.components(); + + match it.next() { + None => { + // the (remaining) path is empty, return the node we've been called with. + Ok(Some(node)) + } + Some(first_component) => { + match node { + crate::proto::node::Node::File(_) | crate::proto::node::Node::Symlink(_) => { + // There's still some path left, but the current node is no directory. + // This means the path doesn't exist, as we can't reach it. + Ok(None) + } + crate::proto::node::Node::Directory(directory_node) => { + let digest = B3Digest::from_vec(directory_node.digest) + .map_err(|_e| Error::StorageError("invalid digest length".to_string()))?; + + // fetch the linked node from the directory_service + match directory_service.get(&digest)? { + // If we didn't get the directory node that's linked, that's a store inconsistency, bail out! + None => { + warn!("directory {} does not exist", digest); + + Err(Error::StorageError(format!( + "directory {} does not exist", + digest + ))) + } + Some(directory) => { + // look for first_component in the [Directory]. + // FUTUREWORK: as the nodes() iterator returns in a sorted fashion, we + // could stop as soon as e.name is larger than the search string. + let child_node = directory.nodes().find(|n| { + n.get_name() == first_component.as_os_str().to_str().unwrap() + }); + + match child_node { + // child node not found means there's no such element inside the directory. + None => Ok(None), + // child node found, recurse with it and the rest of the path. 
+ Some(child_node) => { + let rest_path: std::path::PathBuf = it.collect(); + traverse_to(directory_service, child_node, &rest_path) + } + } + } + } + } + } + } + } +} + +#[cfg(test)] +mod tests { + use std::path::PathBuf; + + use crate::{ + directoryservice::DirectoryPutter, + directoryservice::DirectoryService, + tests::{ + fixtures::{DIRECTORY_COMPLICATED, DIRECTORY_WITH_KEEP}, + utils::gen_directory_service, + }, + }; + + use super::traverse_to; + + #[test] + fn test_traverse_to() { + let mut directory_service = gen_directory_service(); + + let mut handle = directory_service.put_multiple_start(); + handle + .put(DIRECTORY_WITH_KEEP.clone()) + .expect("must succeed"); + handle + .put(DIRECTORY_COMPLICATED.clone()) + .expect("must succeed"); + + // construct the node for DIRECTORY_COMPLICATED + let node_directory_complicated = + crate::proto::node::Node::Directory(crate::proto::DirectoryNode { + name: "doesntmatter".to_string(), + digest: DIRECTORY_COMPLICATED.digest().to_vec(), + size: DIRECTORY_COMPLICATED.size(), + }); + + // construct the node for DIRECTORY_COMPLICATED + let node_directory_with_keep = crate::proto::node::Node::Directory( + DIRECTORY_COMPLICATED.directories.first().unwrap().clone(), + ); + + // construct the node for the .keep file + let node_file_keep = + crate::proto::node::Node::File(DIRECTORY_WITH_KEEP.files.first().unwrap().clone()); + + // traversal to an empty subpath should return the root node. + { + let resp = traverse_to( + &mut directory_service, + node_directory_complicated.clone(), + &PathBuf::from(""), + ) + .expect("must succeed"); + + assert_eq!(Some(node_directory_complicated.clone()), resp); + } + + // traversal to `keep` should return the node for DIRECTORY_WITH_KEEP + { + let resp = traverse_to( + &mut directory_service, + node_directory_complicated.clone(), + &PathBuf::from("keep"), + ) + .expect("must succeed"); + + assert_eq!(Some(node_directory_with_keep.clone()), resp); + } + + // traversal to `keep/.keep` should return the node for the .keep file + { + let resp = traverse_to( + &mut directory_service, + node_directory_complicated.clone(), + &PathBuf::from("keep/.keep"), + ) + .expect("must succeed"); + + assert_eq!(Some(node_file_keep.clone()), resp); + } + + // traversal to `keep/.keep` should return the node for the .keep file + { + let resp = traverse_to( + &mut directory_service, + node_directory_complicated.clone(), + &PathBuf::from("/keep/.keep"), + ) + .expect("must succeed"); + + assert_eq!(Some(node_file_keep.clone()), resp); + } + + // traversal to `void` should return None (doesn't exist) + { + let resp = traverse_to( + &mut directory_service, + node_directory_complicated.clone(), + &PathBuf::from("void"), + ) + .expect("must succeed"); + + assert_eq!(None, resp); + } + + // traversal to `void` should return None (doesn't exist) + { + let resp = traverse_to( + &mut directory_service, + node_directory_complicated.clone(), + &PathBuf::from("//v/oid"), + ) + .expect("must succeed"); + + assert_eq!(None, resp); + } + + // traversal to `keep/.keep/404` should return None (the path can't be + // reached, as keep/.keep already is a file) + { + let resp = traverse_to( + &mut directory_service, + node_directory_complicated.clone(), + &PathBuf::from("keep/.keep/foo"), + ) + .expect("must succeed"); + + assert_eq!(None, resp); + } + + // traversal to a subpath of '/' should return the root node. 
+ { + let resp = traverse_to( + &mut directory_service, + node_directory_complicated.clone(), + &PathBuf::from("/"), + ) + .expect("must succeed"); + + assert_eq!(Some(node_directory_complicated.clone()), resp); + } + } +} diff --git a/tvix/store/src/directoryservice/utils.rs b/tvix/store/src/directoryservice/utils.rs new file mode 100644 index 000000000000..3661808734f3 --- /dev/null +++ b/tvix/store/src/directoryservice/utils.rs @@ -0,0 +1,140 @@ +use super::DirectoryPutter; +use super::DirectoryService; +use crate::proto; +use crate::B3Digest; +use crate::Error; +use std::collections::{HashSet, VecDeque}; +use tracing::{debug_span, instrument, warn}; + +/// Traverses a [proto::Directory] from the root to the children. +/// +/// This is mostly BFS, but directories are only returned once. +pub struct DirectoryTraverser<DS: DirectoryService> { + directory_service: DS, + /// The list of all directories that still need to be traversed. The next + /// element is picked from the front, new elements are enqueued at the + /// back. + worklist_directory_digests: VecDeque<B3Digest>, + /// The list of directory digests already sent to the consumer. + /// We omit sending the same directories multiple times. + sent_directory_digests: HashSet<B3Digest>, +} + +impl<DS: DirectoryService> DirectoryTraverser<DS> { + pub fn with(directory_service: DS, root_directory_digest: &B3Digest) -> Self { + Self { + directory_service, + worklist_directory_digests: VecDeque::from([root_directory_digest.clone()]), + sent_directory_digests: HashSet::new(), + } + } + + // enqueue all child directory digests to the work queue, as + // long as they're not part of the worklist or already sent. + // This panics if the digest looks invalid, it's supposed to be checked first. + fn enqueue_child_directories(&mut self, directory: &proto::Directory) { + for child_directory_node in &directory.directories { + // TODO: propagate error + let child_digest = B3Digest::from_vec(child_directory_node.digest.clone()).unwrap(); + + if self.worklist_directory_digests.contains(&child_digest) + || self.sent_directory_digests.contains(&child_digest) + { + continue; + } + self.worklist_directory_digests.push_back(child_digest); + } + } +} + +impl<DS: DirectoryService> Iterator for DirectoryTraverser<DS> { + type Item = Result<proto::Directory, Error>; + + #[instrument(skip_all)] + fn next(&mut self) -> Option<Self::Item> { + // fetch the next directory digest from the top of the work queue. + match self.worklist_directory_digests.pop_front() { + None => None, + Some(current_directory_digest) => { + let span = debug_span!("directory.digest", "{}", current_directory_digest); + let _ = span.enter(); + + // look up the directory itself. + let current_directory = match self.directory_service.get(¤t_directory_digest) + { + // if we got it + Ok(Some(current_directory)) => { + // validate, we don't want to send invalid directories. + if let Err(e) = current_directory.validate() { + warn!("directory failed validation: {}", e.to_string()); + return Some(Err(Error::StorageError(format!( + "invalid directory: {}", + current_directory_digest + )))); + } + current_directory + } + // if it's not there, we have an inconsistent store! 
+ Ok(None) => { + warn!("directory {} does not exist", current_directory_digest); + return Some(Err(Error::StorageError(format!( + "directory {} does not exist", + current_directory_digest + )))); + } + Err(e) => { + warn!("failed to look up directory"); + return Some(Err(Error::StorageError(format!( + "unable to look up directory {}: {}", + current_directory_digest, e + )))); + } + }; + + // All DirectoryServices MUST validate directory nodes, before returning them out, so we + // can be sure [enqueue_child_directories] doesn't panic. + + // enqueue child directories + self.enqueue_child_directories(¤t_directory); + Some(Ok(current_directory)) + } + } + } +} + +/// This is a simple implementation of a Directory uploader. +/// TODO: verify connectivity? Factor out these checks into generic helpers? +pub struct SimplePutter<DS: DirectoryService> { + directory_service: DS, + last_directory_digest: Option<B3Digest>, +} + +impl<DS: DirectoryService> SimplePutter<DS> { + pub fn new(directory_service: DS) -> Self { + Self { + directory_service, + last_directory_digest: None, + } + } +} + +impl<DS: DirectoryService> DirectoryPutter for SimplePutter<DS> { + fn put(&mut self, directory: proto::Directory) -> Result<(), Error> { + let digest = self.directory_service.put(directory)?; + + // track the last directory digest + self.last_directory_digest = Some(digest); + + Ok(()) + } + + /// We need to be mutable here, as that's the signature of the trait. + fn close(&mut self) -> Result<B3Digest, Error> { + match &self.last_directory_digest { + Some(last_digest) => Ok(last_digest.clone()), + None => Err(Error::InvalidRequest( + "no directories sent, can't show root digest".to_string(), + )), + } + } +} diff --git a/tvix/store/src/errors.rs b/tvix/store/src/errors.rs new file mode 100644 index 000000000000..3b23f972b045 --- /dev/null +++ b/tvix/store/src/errors.rs @@ -0,0 +1,45 @@ +use std::sync::PoisonError; +use thiserror::Error; +use tokio::task::JoinError; +use tonic::Status; + +/// Errors related to communication with the store. +#[derive(Debug, Error)] +pub enum Error { + #[error("invalid request: {0}")] + InvalidRequest(String), + + #[error("internal storage error: {0}")] + StorageError(String), +} + +impl<T> From<PoisonError<T>> for Error { + fn from(value: PoisonError<T>) -> Self { + Error::StorageError(value.to_string()) + } +} + +impl From<JoinError> for Error { + fn from(value: JoinError) -> Self { + Error::StorageError(value.to_string()) + } +} + +impl From<Error> for Status { + fn from(value: Error) -> Self { + match value { + Error::InvalidRequest(msg) => Status::invalid_argument(msg), + Error::StorageError(msg) => Status::data_loss(format!("storage error: {}", msg)), + } + } +} + +// TODO: this should probably go somewhere else? 
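Because `Error` implements `From<PoisonError<_>>` and `From<JoinError>`, lock poisoning and task-join failures bubble up with the `?` operator instead of explicit matching, which is what the services above rely on. A small sketch under that assumption (hypothetical helper, assumes the `Error` type defined above is in scope):

    use std::sync::RwLock;

    // Hypothetical helper: the `?` on read() compiles because PoisonError
    // converts into Error::StorageError via the From impl above.
    fn read_counter(db: &RwLock<u64>) -> Result<u64, Error> {
        let guard = db.read()?;
        Ok(*guard)
    }
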
+impl From<Error> for std::io::Error { + fn from(value: Error) -> Self { + match value { + Error::InvalidRequest(msg) => Self::new(std::io::ErrorKind::InvalidInput, msg), + Error::StorageError(msg) => Self::new(std::io::ErrorKind::Other, msg), + } + } +} diff --git a/tvix/store/src/import.rs b/tvix/store/src/import.rs new file mode 100644 index 000000000000..206e5eaba975 --- /dev/null +++ b/tvix/store/src/import.rs @@ -0,0 +1,220 @@ +use crate::{blobservice::BlobService, directoryservice::DirectoryService}; +use crate::{blobservice::BlobWriter, directoryservice::DirectoryPutter, proto}; +use std::{ + collections::HashMap, + fmt::Debug, + fs, + fs::File, + io, + os::unix::prelude::PermissionsExt, + path::{Path, PathBuf}, +}; +use tracing::instrument; +use walkdir::WalkDir; + +#[derive(Debug, thiserror::Error)] +pub enum Error { + #[error("failed to upload directory at {0}: {1}")] + UploadDirectoryError(PathBuf, crate::Error), + + #[error("invalid encoding encountered for entry {0:?}")] + InvalidEncoding(PathBuf), + + #[error("unable to stat {0}: {1}")] + UnableToStat(PathBuf, std::io::Error), + + #[error("unable to open {0}: {1}")] + UnableToOpen(PathBuf, std::io::Error), + + #[error("unable to read {0}: {1}")] + UnableToRead(PathBuf, std::io::Error), +} + +impl From<super::Error> for Error { + fn from(value: super::Error) -> Self { + match value { + crate::Error::InvalidRequest(_) => panic!("tvix bug"), + crate::Error::StorageError(_) => panic!("error"), + } + } +} + +// This processes a given [walkdir::DirEntry] and returns a +// proto::node::Node, depending on the type of the entry. +// +// If the entry is a file, its contents are uploaded. +// If the entry is a directory, the Directory is uploaded as well. +// For this to work, it relies on the caller to provide the directory object +// with the previously returned (child) nodes. +// +// It assumes entries to be returned in "contents first" order, means this +// will only be called with a directory if all children of it have been +// visited. If the entry is indeed a directory, it'll also upload that +// directory to the store. For this, the so-far-assembled Directory object for +// this path needs to be passed in. +// +// It assumes the caller adds returned nodes to the directories it assembles. 
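The contents-first contract spelled out above hinges on walkdir's traversal order. A short sketch of the settings `ingest_path` uses further down and the order they produce (hypothetical paths, not tvix code):

    use walkdir::WalkDir;

    // With contents_first(true), children are yielded before the directory
    // containing them; for /data/sub/file.txt the order is
    //   /data/sub/file.txt, /data/sub, /data
    // so a Directory message can be fully assembled before process_entry
    // is called for its directory entry.
    fn print_ingest_order(root: &std::path::Path) {
        for entry in WalkDir::new(root)
            .follow_links(false)
            .contents_first(true)
            .sort_by_file_name()
        {
            println!("{}", entry.unwrap().path().display());
        }
    }
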
+#[instrument(skip_all, fields(entry.file_type=?&entry.file_type(),entry.path=?entry.path()))] +fn process_entry<BS: BlobService, DP: DirectoryPutter>( + blob_service: &BS, + directory_putter: &mut DP, + entry: &walkdir::DirEntry, + maybe_directory: Option<proto::Directory>, +) -> Result<proto::node::Node, Error> { + let file_type = entry.file_type(); + + let entry_path: PathBuf = entry.path().to_path_buf(); + + if file_type.is_dir() { + let directory = maybe_directory + .expect("tvix bug: must be called with some directory in the case of directory"); + let directory_digest = directory.digest(); + let directory_size = directory.size(); + + // upload this directory + directory_putter + .put(directory) + .map_err(|e| Error::UploadDirectoryError(entry.path().to_path_buf(), e))?; + + return Ok(proto::node::Node::Directory(proto::DirectoryNode { + name: entry + .file_name() + .to_str() + .map(|s| Ok(s.to_owned())) + .unwrap_or(Err(Error::InvalidEncoding(entry.path().to_path_buf())))?, + digest: directory_digest.to_vec(), + size: directory_size, + })); + } + + if file_type.is_symlink() { + let target = std::fs::read_link(&entry_path) + .map_err(|e| Error::UnableToStat(entry_path.clone(), e))?; + + return Ok(proto::node::Node::Symlink(proto::SymlinkNode { + name: entry + .file_name() + .to_str() + .map(|s| Ok(s.to_owned())) + .unwrap_or(Err(Error::InvalidEncoding(entry.path().to_path_buf())))?, + target: target + .to_str() + .map(|s| Ok(s.to_owned())) + .unwrap_or(Err(Error::InvalidEncoding(entry.path().to_path_buf())))?, + })); + } + + if file_type.is_file() { + let metadata = entry + .metadata() + .map_err(|e| Error::UnableToStat(entry_path.clone(), e.into()))?; + + let mut file = File::open(entry_path.clone()) + .map_err(|e| Error::UnableToOpen(entry_path.clone(), e))?; + + let mut writer = blob_service.open_write()?; + + if let Err(e) = io::copy(&mut file, &mut writer) { + return Err(Error::UnableToRead(entry_path, e)); + }; + + let digest = writer.close()?; + + return Ok(proto::node::Node::File(proto::FileNode { + name: entry + .file_name() + .to_str() + .map(|s| Ok(s.to_owned())) + .unwrap_or(Err(Error::InvalidEncoding(entry.path().to_path_buf())))?, + digest: digest.to_vec(), + size: metadata.len() as u32, + // If it's executable by the user, it'll become executable. + // This matches nix's dump() function behaviour. + executable: metadata.permissions().mode() & 64 != 0, + })); + } + todo!("handle other types") +} + +/// Ingests the contents at the given path into the tvix store, +/// interacting with a [BlobService] and [DirectoryService]. +/// It returns the root node or an error. +/// +/// It's not interacting with a [PathInfoService], it's up to the caller to +/// possibly register it somewhere (and potentially rename it based on some +/// naming scheme. +#[instrument(skip(blob_service, directory_service), fields(path=?p))] +pub fn ingest_path<BS: BlobService, DS: DirectoryService, P: AsRef<Path> + Debug>( + blob_service: &BS, + directory_service: &DS, + p: P, +) -> Result<proto::node::Node, Error> { + // Probe if the path points to a symlink. If it does, we process it manually, + // due to https://github.com/BurntSushi/walkdir/issues/175. 
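One constant in `process_entry` above is easy to misread: the mask `64` is `0o100`, the owner-execute permission bit, so a file is stored as executable exactly when its owner may execute it. The same check written with the octal literal (illustrative only):

    use std::fs::Metadata;
    use std::os::unix::fs::PermissionsExt;

    // 64 == 0o100: test the owner-execute bit, matching Nix's dump() behaviour.
    fn is_executable(metadata: &Metadata) -> bool {
        metadata.permissions().mode() & 0o100 != 0
    }
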
+ let symlink_metadata = fs::symlink_metadata(p.as_ref()) + .map_err(|e| Error::UnableToStat(p.as_ref().to_path_buf(), e))?; + if symlink_metadata.is_symlink() { + let target = std::fs::read_link(p.as_ref()) + .map_err(|e| Error::UnableToStat(p.as_ref().to_path_buf(), e))?; + return Ok(proto::node::Node::Symlink(proto::SymlinkNode { + name: p + .as_ref() + .file_name() + .unwrap_or_default() + .to_str() + .map(|s| Ok(s.to_owned())) + .unwrap_or(Err(Error::InvalidEncoding(p.as_ref().to_path_buf())))?, + target: target + .to_str() + .map(|s| Ok(s.to_owned())) + .unwrap_or(Err(Error::InvalidEncoding(p.as_ref().to_path_buf())))?, + })); + } + + let mut directories: HashMap<PathBuf, proto::Directory> = HashMap::default(); + + let mut directory_putter = directory_service.put_multiple_start(); + + for entry in WalkDir::new(p) + .follow_links(false) + .contents_first(true) + .sort_by_file_name() + { + let entry = entry.unwrap(); + + // process_entry wants an Option<Directory> in case the entry points to a directory. + // make sure to provide it. + let maybe_directory: Option<proto::Directory> = { + if entry.file_type().is_dir() { + Some( + directories + .entry(entry.path().to_path_buf()) + .or_default() + .clone(), + ) + } else { + None + } + }; + + let node = process_entry(blob_service, &mut directory_putter, &entry, maybe_directory)?; + + if entry.depth() == 0 { + return Ok(node); + } else { + // calculate the parent path, and make sure we register the node there. + // NOTE: entry.depth() > 0 + let parent_path = entry.path().parent().unwrap().to_path_buf(); + + // record node in parent directory, creating a new [proto:Directory] if not there yet. + let parent_directory = directories.entry(parent_path).or_default(); + match node { + proto::node::Node::Directory(e) => parent_directory.directories.push(e), + proto::node::Node::File(e) => parent_directory.files.push(e), + proto::node::Node::Symlink(e) => parent_directory.symlinks.push(e), + } + } + } + // unreachable, we already bailed out before if root doesn't exist. + panic!("tvix bug") +} diff --git a/tvix/store/src/lib.rs b/tvix/store/src/lib.rs new file mode 100644 index 000000000000..7ae8587f8b26 --- /dev/null +++ b/tvix/store/src/lib.rs @@ -0,0 +1,17 @@ +mod digests; +mod errors; +mod store_io; + +pub mod blobservice; +pub mod directoryservice; +pub mod import; +pub mod nar; +pub mod pathinfoservice; +pub mod proto; + +pub use digests::B3Digest; +pub use errors::Error; +pub use store_io::TvixStoreIO; + +#[cfg(test)] +mod tests; diff --git a/tvix/store/src/nar/grpc_nar_calculation_service.rs b/tvix/store/src/nar/grpc_nar_calculation_service.rs new file mode 100644 index 000000000000..429593743914 --- /dev/null +++ b/tvix/store/src/nar/grpc_nar_calculation_service.rs @@ -0,0 +1,69 @@ +use super::NARCalculationService; +use crate::proto; +use tonic::transport::Channel; +use tonic::Status; + +/// A NAR calculation service which asks a remote tvix-store for NAR calculation +/// (via the gRPC PathInfoService). +#[derive(Clone)] +pub struct GRPCNARCalculationService { + /// A handle into the active tokio runtime. Necessary to spawn tasks. + tokio_handle: tokio::runtime::Handle, + + /// The internal reference to a gRPC client. + /// Cloning it is cheap, and it internally handles concurrent requests. + grpc_client: proto::path_info_service_client::PathInfoServiceClient<Channel>, +} + +impl GRPCNARCalculationService { + /// construct a new [GRPCNARCalculationService], by passing a handle to the + /// tokio runtime, and a gRPC client. 
+ pub fn new( + tokio_handle: tokio::runtime::Handle, + grpc_client: proto::path_info_service_client::PathInfoServiceClient<Channel>, + ) -> Self { + Self { + tokio_handle, + grpc_client, + } + } + + /// construct a [GRPCNARCalculationService], from a [proto::path_info_service_client::PathInfoServiceClient<Channel>]. + /// panics if called outside the context of a tokio runtime. + pub fn from_client( + grpc_client: proto::path_info_service_client::PathInfoServiceClient<Channel>, + ) -> Self { + Self { + tokio_handle: tokio::runtime::Handle::current(), + grpc_client, + } + } +} + +impl NARCalculationService for GRPCNARCalculationService { + fn calculate_nar( + &self, + root_node: &proto::node::Node, + ) -> Result<(u64, [u8; 32]), super::RenderError> { + // Get a new handle to the gRPC client, and copy the root node. + let mut grpc_client = self.grpc_client.clone(); + let root_node = root_node.clone(); + + let task: tokio::task::JoinHandle<Result<_, Status>> = + self.tokio_handle.spawn(async move { + Ok(grpc_client + .calculate_nar(proto::Node { + node: Some(root_node), + }) + .await? + .into_inner()) + }); + + match self.tokio_handle.block_on(task).unwrap() { + Ok(resp) => Ok((resp.nar_size, resp.nar_sha256.to_vec().try_into().unwrap())), + Err(e) => Err(super::RenderError::StoreError(crate::Error::StorageError( + e.to_string(), + ))), + } + } +} diff --git a/tvix/store/src/nar/mod.rs b/tvix/store/src/nar/mod.rs new file mode 100644 index 000000000000..a29cc5451bae --- /dev/null +++ b/tvix/store/src/nar/mod.rs @@ -0,0 +1,35 @@ +use crate::{proto, B3Digest}; +use data_encoding::BASE64; +use thiserror::Error; + +mod grpc_nar_calculation_service; +mod non_caching_calculation_service; +mod renderer; + +pub use grpc_nar_calculation_service::GRPCNARCalculationService; +pub use non_caching_calculation_service::NonCachingNARCalculationService; +pub use renderer::NARRenderer; + +/// Errors that can encounter while rendering NARs. +#[derive(Debug, Error)] +pub enum RenderError { + #[error("failure talking to a backing store client: {0}")] + StoreError(crate::Error), + + #[error("unable to find directory {}, referred from {}", .0, .1)] + DirectoryNotFound(B3Digest, String), + + #[error("unable to find blob {}, referred from {}", BASE64.encode(.0), .1)] + BlobNotFound([u8; 32], String), + + #[error("unexpected size in metadata for blob {}, referred from {} returned, expected {}, got {}", BASE64.encode(.0), .1, .2, .3)] + UnexpectedBlobMeta([u8; 32], String, u32, u32), + + #[error("failure using the NAR writer: {0}")] + NARWriterError(std::io::Error), +} + +/// The base trait for something calculating NARs, and returning their size and sha256. +pub trait NARCalculationService { + fn calculate_nar(&self, root_node: &proto::node::Node) -> Result<(u64, [u8; 32]), RenderError>; +} diff --git a/tvix/store/src/nar/non_caching_calculation_service.rs b/tvix/store/src/nar/non_caching_calculation_service.rs new file mode 100644 index 000000000000..8a080cb4df5e --- /dev/null +++ b/tvix/store/src/nar/non_caching_calculation_service.rs @@ -0,0 +1,37 @@ +use count_write::CountWrite; +use sha2::{Digest, Sha256}; + +use crate::blobservice::BlobService; +use crate::directoryservice::DirectoryService; +use crate::proto; + +use super::renderer::NARRenderer; +use super::{NARCalculationService, RenderError}; + +/// A NAR calculation service which simply renders the whole NAR whenever +/// we ask for the calculation. 
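The implementation below leans on `count_write::CountWrite` to measure the NAR size and feed SHA-256 in a single pass over the rendered output. A stand-alone sketch of that pattern (assumes the `count_write` and `sha2` crates; not tvix code):

    use count_write::CountWrite;
    use sha2::{Digest, Sha256};
    use std::io::Write;

    // Everything written through the CountWrite is both counted and hashed,
    // yielding exactly the (nar_size, nar_sha256) pair calculate_nar returns.
    fn size_and_sha256(data: &[u8]) -> (u64, [u8; 32]) {
        let mut cw = CountWrite::from(Sha256::new());
        cw.write_all(data).expect("writing to a hasher cannot fail");
        (cw.count(), cw.into_inner().finalize().into())
    }
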
+#[derive(Clone)] +pub struct NonCachingNARCalculationService<BS: BlobService, DS: DirectoryService> { + nar_renderer: NARRenderer<BS, DS>, +} + +impl<BS: BlobService, DS: DirectoryService> NonCachingNARCalculationService<BS, DS> { + pub fn new(blob_service: BS, directory_service: DS) -> Self { + Self { + nar_renderer: NARRenderer::new(blob_service, directory_service), + } + } +} + +impl<BS: BlobService, DS: DirectoryService> NARCalculationService + for NonCachingNARCalculationService<BS, DS> +{ + fn calculate_nar(&self, root_node: &proto::node::Node) -> Result<(u64, [u8; 32]), RenderError> { + let h = Sha256::new(); + let mut cw = CountWrite::from(h); + + self.nar_renderer.write_nar(&mut cw, root_node)?; + + Ok((cw.count(), cw.into_inner().finalize().into())) + } +} diff --git a/tvix/store/src/nar/renderer.rs b/tvix/store/src/nar/renderer.rs new file mode 100644 index 000000000000..c10f2ddf52fa --- /dev/null +++ b/tvix/store/src/nar/renderer.rs @@ -0,0 +1,136 @@ +use super::RenderError; +use crate::{ + blobservice::BlobService, + directoryservice::DirectoryService, + proto::{self, NamedNode}, + B3Digest, +}; +use nix_compat::nar; +use std::io::{self, BufReader}; +use tracing::warn; + +/// A NAR renderer, using a blob_service, chunk_service and directory_service +/// to render a NAR to a writer. +#[derive(Clone)] +pub struct NARRenderer<BS: BlobService, DS: DirectoryService> { + blob_service: BS, + directory_service: DS, +} + +impl<BS: BlobService, DS: DirectoryService> NARRenderer<BS, DS> { + pub fn new(blob_service: BS, directory_service: DS) -> Self { + Self { + blob_service, + directory_service, + } + } + + /// Consumes a [proto::node::Node] pointing to the root of a (store) path, + /// and writes the contents in NAR serialization to the passed + /// [std::io::Write]. + /// + /// It uses the different clients in the struct to perform the necessary + /// lookups as it traverses the structure. + pub fn write_nar<W: std::io::Write>( + &self, + w: &mut W, + proto_root_node: &proto::node::Node, + ) -> Result<(), RenderError> { + // Initialize NAR writer + let nar_root_node = nar::writer::open(w).map_err(RenderError::NARWriterError)?; + + self.walk_node(nar_root_node, proto_root_node) + } + + /// Process an intermediate node in the structure. + /// This consumes the node. + fn walk_node( + &self, + nar_node: nar::writer::Node, + proto_node: &proto::node::Node, + ) -> Result<(), RenderError> { + match proto_node { + proto::node::Node::Symlink(proto_symlink_node) => { + nar_node + .symlink(&proto_symlink_node.target) + .map_err(RenderError::NARWriterError)?; + } + proto::node::Node::File(proto_file_node) => { + let digest = B3Digest::from_vec(proto_file_node.digest.clone()).map_err(|_e| { + warn!( + file_node = ?proto_file_node, + "invalid digest length in file node", + ); + + RenderError::StoreError(crate::Error::StorageError( + "invalid digest len in file node".to_string(), + )) + })?; + + let mut blob_reader = match self + .blob_service + .open_read(&digest) + .map_err(RenderError::StoreError)? 
+ { + Some(blob_reader) => Ok(BufReader::new(blob_reader)), + None => Err(RenderError::NARWriterError(io::Error::new( + io::ErrorKind::NotFound, + format!("blob with digest {} not found", &digest), + ))), + }?; + + nar_node + .file( + proto_file_node.executable, + proto_file_node.size.into(), + &mut blob_reader, + ) + .map_err(RenderError::NARWriterError)?; + } + proto::node::Node::Directory(proto_directory_node) => { + let digest = + B3Digest::from_vec(proto_directory_node.digest.to_vec()).map_err(|_e| { + RenderError::StoreError(crate::Error::StorageError( + "invalid digest len in directory node".to_string(), + )) + })?; + + // look it up with the directory service + let resp = self + .directory_service + .get(&digest) + .map_err(RenderError::StoreError)?; + + match resp { + // if it's None, that's an error! + None => { + return Err(RenderError::DirectoryNotFound( + digest, + proto_directory_node.name.to_owned(), + )) + } + Some(proto_directory) => { + // start a directory node + let mut nar_node_directory = + nar_node.directory().map_err(RenderError::NARWriterError)?; + + // for each node in the directory, create a new entry with its name, + // and then invoke walk_node on that entry. + for proto_node in proto_directory.nodes() { + let child_node = nar_node_directory + .entry(proto_node.get_name()) + .map_err(RenderError::NARWriterError)?; + self.walk_node(child_node, &proto_node)?; + } + + // close the directory + nar_node_directory + .close() + .map_err(RenderError::NARWriterError)?; + } + } + } + } + Ok(()) + } +} diff --git a/tvix/store/src/pathinfoservice/grpc.rs b/tvix/store/src/pathinfoservice/grpc.rs new file mode 100644 index 000000000000..6bb774c668a3 --- /dev/null +++ b/tvix/store/src/pathinfoservice/grpc.rs @@ -0,0 +1,81 @@ +use super::PathInfoService; +use crate::proto; +use tonic::{transport::Channel, Code, Status}; + +/// Connects to a (remote) tvix-store PathInfoService over gRPC. +#[derive(Clone)] +pub struct GRPCPathInfoService { + /// A handle into the active tokio runtime. Necessary to spawn tasks. + tokio_handle: tokio::runtime::Handle, + + /// The internal reference to a gRPC client. + /// Cloning it is cheap, and it internally handles concurrent requests. + grpc_client: proto::path_info_service_client::PathInfoServiceClient<Channel>, +} + +impl GRPCPathInfoService { + /// Construct a new [GRPCPathInfoService], by passing a handle to the tokio + /// runtime, and a gRPC client. + pub fn new( + tokio_handle: tokio::runtime::Handle, + grpc_client: proto::path_info_service_client::PathInfoServiceClient<Channel>, + ) -> Self { + Self { + tokio_handle, + grpc_client, + } + } + + /// construct a [GRPCDirectoryService] from a [proto::path_info_service_client::PathInfoServiceClient<Channel>]. + /// panics if called outside the context of a tokio runtime. + pub fn from_client( + grpc_client: proto::path_info_service_client::PathInfoServiceClient<Channel>, + ) -> Self { + Self { + tokio_handle: tokio::runtime::Handle::current(), + grpc_client, + } + } +} + +impl PathInfoService for GRPCPathInfoService { + fn get(&self, digest: [u8; 20]) -> Result<Option<proto::PathInfo>, crate::Error> { + // Get a new handle to the gRPC client. + let mut grpc_client = self.grpc_client.clone(); + + let task: tokio::task::JoinHandle<Result<proto::PathInfo, Status>> = + self.tokio_handle.spawn(async move { + let path_info = grpc_client + .get(proto::GetPathInfoRequest { + by_what: Some(proto::get_path_info_request::ByWhat::ByOutputHash( + digest.to_vec(), + )), + }) + .await? 
+ .into_inner(); + + Ok(path_info) + }); + + match self.tokio_handle.block_on(task)? { + Ok(path_info) => Ok(Some(path_info)), + Err(e) if e.code() == Code::NotFound => Ok(None), + Err(e) => Err(crate::Error::StorageError(e.to_string())), + } + } + + fn put(&self, path_info: proto::PathInfo) -> Result<proto::PathInfo, crate::Error> { + // Get a new handle to the gRPC client. + let mut grpc_client = self.grpc_client.clone(); + + let task: tokio::task::JoinHandle<Result<proto::PathInfo, Status>> = + self.tokio_handle.spawn(async move { + let path_info = grpc_client.put(path_info).await?.into_inner(); + Ok(path_info) + }); + + self.tokio_handle + .block_on(task)? + .map_err(|e| crate::Error::StorageError(e.to_string())) + } +} diff --git a/tvix/store/src/pathinfoservice/memory.rs b/tvix/store/src/pathinfoservice/memory.rs new file mode 100644 index 000000000000..d0ff1976efab --- /dev/null +++ b/tvix/store/src/pathinfoservice/memory.rs @@ -0,0 +1,41 @@ +use super::PathInfoService; +use crate::{proto, Error}; +use std::{ + collections::HashMap, + sync::{Arc, RwLock}, +}; + +#[derive(Default)] +pub struct MemoryPathInfoService { + db: Arc<RwLock<HashMap<[u8; 20], proto::PathInfo>>>, +} + +impl PathInfoService for MemoryPathInfoService { + fn get(&self, digest: [u8; 20]) -> Result<Option<proto::PathInfo>, Error> { + let db = self.db.read().unwrap(); + + match db.get(&digest) { + None => Ok(None), + Some(path_info) => Ok(Some(path_info.clone())), + } + } + + fn put(&self, path_info: proto::PathInfo) -> Result<proto::PathInfo, Error> { + // Call validate on the received PathInfo message. + match path_info.validate() { + Err(e) => Err(Error::InvalidRequest(format!( + "failed to validate PathInfo: {}", + e + ))), + + // In case the PathInfo is valid, and we were able to extract a NixPath, store it in the database. + // This overwrites existing PathInfo objects. + Ok(nix_path) => { + let mut db = self.db.write().unwrap(); + db.insert(nix_path.digest, path_info.clone()); + + Ok(path_info) + } + } + } +} diff --git a/tvix/store/src/pathinfoservice/mod.rs b/tvix/store/src/pathinfoservice/mod.rs new file mode 100644 index 000000000000..6a34e09af478 --- /dev/null +++ b/tvix/store/src/pathinfoservice/mod.rs @@ -0,0 +1,20 @@ +mod grpc; +mod memory; +mod sled; + +use crate::{proto, Error}; + +pub use self::grpc::GRPCPathInfoService; +pub use self::memory::MemoryPathInfoService; +pub use self::sled::SledPathInfoService; + +/// The base trait all PathInfo services need to implement. +/// This is a simple get and put of [proto::Directory], returning their digest. +pub trait PathInfoService { + /// Retrieve a PathInfo message by the output digest. + fn get(&self, digest: [u8; 20]) -> Result<Option<proto::PathInfo>, Error>; + + /// Store a PathInfo message. Implementations MUST call validate and reject + /// invalid messages. + fn put(&self, path_info: proto::PathInfo) -> Result<proto::PathInfo, Error>; +} diff --git a/tvix/store/src/pathinfoservice/sled.rs b/tvix/store/src/pathinfoservice/sled.rs new file mode 100644 index 000000000000..8776ebcbc106 --- /dev/null +++ b/tvix/store/src/pathinfoservice/sled.rs @@ -0,0 +1,76 @@ +use super::PathInfoService; +use crate::{proto, Error}; +use prost::Message; +use std::path::PathBuf; +use tracing::warn; + +/// SledPathInfoService stores PathInfo in a [sled](https://github.com/spacejam/sled). +/// +/// The PathInfo messages are stored as encoded protos, and keyed by their output hash, +/// as that's currently the only request type available. 
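Concretely, the layout is: key = the 20-byte output-path digest, value = the prost-encoded `PathInfo` message. A minimal sketch of that encode/decode round trip (hypothetical helper, not tvix code):

    use prost::Message;

    // Store an encoded PathInfo under its output-hash key and read it back.
    fn roundtrip(
        db: &sled::Db,
        digest: [u8; 20],
        path_info: &proto::PathInfo,
    ) -> Result<Option<proto::PathInfo>, sled::Error> {
        db.insert(&digest, path_info.encode_to_vec())?;
        Ok(db
            .get(&digest)?
            .map(|ivec| proto::PathInfo::decode(&*ivec).expect("written by us, must decode")))
    }
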
+#[derive(Clone)] +pub struct SledPathInfoService { + db: sled::Db, +} + +impl SledPathInfoService { + pub fn new(p: PathBuf) -> Result<Self, sled::Error> { + let config = sled::Config::default().use_compression(true).path(p); + let db = config.open()?; + + Ok(Self { db }) + } + + pub fn new_temporary() -> Result<Self, sled::Error> { + let config = sled::Config::default().temporary(true); + let db = config.open()?; + + Ok(Self { db }) + } +} + +impl PathInfoService for SledPathInfoService { + fn get(&self, digest: [u8; 20]) -> Result<Option<proto::PathInfo>, Error> { + match self.db.get(digest) { + Ok(None) => Ok(None), + Ok(Some(data)) => match proto::PathInfo::decode(&*data) { + Ok(path_info) => Ok(Some(path_info)), + Err(e) => { + warn!("failed to decode stored PathInfo: {}", e); + Err(Error::StorageError(format!( + "failed to decode stored PathInfo: {}", + e + ))) + } + }, + Err(e) => { + warn!("failed to retrieve PathInfo: {}", e); + Err(Error::StorageError(format!( + "failed to retrieve PathInfo: {}", + e + ))) + } + } + } + + fn put(&self, path_info: proto::PathInfo) -> Result<proto::PathInfo, Error> { + // Call validate on the received PathInfo message. + match path_info.validate() { + Err(e) => Err(Error::InvalidRequest(format!( + "failed to validate PathInfo: {}", + e + ))), + // In case the PathInfo is valid, and we were able to extract a NixPath, store it in the database. + // This overwrites existing PathInfo objects. + Ok(nix_path) => match self.db.insert(nix_path.digest, path_info.encode_to_vec()) { + Ok(_) => Ok(path_info), + Err(e) => { + warn!("failed to insert PathInfo: {}", e); + Err(Error::StorageError(format! { + "failed to insert PathInfo: {}", e + })) + } + }, + } + } +} diff --git a/tvix/store/src/proto/grpc_blobservice_wrapper.rs b/tvix/store/src/proto/grpc_blobservice_wrapper.rs new file mode 100644 index 000000000000..3ec1d68872c7 --- /dev/null +++ b/tvix/store/src/proto/grpc_blobservice_wrapper.rs @@ -0,0 +1,130 @@ +use crate::{ + blobservice::{BlobService, BlobWriter}, + proto::sync_read_into_async_read::SyncReadIntoAsyncRead, + B3Digest, +}; +use std::{collections::VecDeque, io, pin::Pin}; +use tokio::task; +use tokio_stream::StreamExt; +use tokio_util::io::ReaderStream; +use tonic::{async_trait, Request, Response, Status, Streaming}; +use tracing::{instrument, warn}; + +pub struct GRPCBlobServiceWrapper<BS: BlobService> { + blob_service: BS, +} + +impl<BS: BlobService> From<BS> for GRPCBlobServiceWrapper<BS> { + fn from(value: BS) -> Self { + Self { + blob_service: value, + } + } +} + +#[async_trait] +impl<BS: BlobService + Send + Sync + Clone + 'static> super::blob_service_server::BlobService + for GRPCBlobServiceWrapper<BS> +{ + // https://github.com/tokio-rs/tokio/issues/2723#issuecomment-1534723933 + type ReadStream = + Pin<Box<dyn futures::Stream<Item = Result<super::BlobChunk, Status>> + Send + 'static>>; + + #[instrument(skip(self))] + async fn stat( + &self, + request: Request<super::StatBlobRequest>, + ) -> Result<Response<super::BlobMeta>, Status> { + let rq = request.into_inner(); + let req_digest = B3Digest::from_vec(rq.digest) + .map_err(|_e| Status::invalid_argument("invalid digest length"))?; + + if rq.include_chunks || rq.include_bao { + return Err(Status::internal("not implemented")); + } + + match self.blob_service.has(&req_digest) { + Ok(true) => Ok(Response::new(super::BlobMeta::default())), + Ok(false) => Err(Status::not_found(format!("blob {} not found", &req_digest))), + Err(e) => Err(e.into()), + } + } + + #[instrument(skip(self))] + 
async fn read( + &self, + request: Request<super::ReadBlobRequest>, + ) -> Result<Response<Self::ReadStream>, Status> { + let rq = request.into_inner(); + + let req_digest = B3Digest::from_vec(rq.digest) + .map_err(|_e| Status::invalid_argument("invalid digest length"))?; + + match self.blob_service.open_read(&req_digest) { + Ok(Some(reader)) => { + let async_reader: SyncReadIntoAsyncRead<_, bytes::BytesMut> = reader.into(); + + fn stream_mapper( + x: Result<bytes::Bytes, io::Error>, + ) -> Result<super::BlobChunk, Status> { + match x { + Ok(bytes) => Ok(super::BlobChunk { + data: bytes.to_vec(), + }), + Err(e) => Err(Status::from(e)), + } + } + + let chunks_stream = ReaderStream::new(async_reader).map(stream_mapper); + Ok(Response::new(Box::pin(chunks_stream))) + } + Ok(None) => Err(Status::not_found(format!("blob {} not found", &req_digest))), + Err(e) => Err(e.into()), + } + } + + #[instrument(skip(self))] + async fn put( + &self, + request: Request<Streaming<super::BlobChunk>>, + ) -> Result<Response<super::PutBlobResponse>, Status> { + let req_inner = request.into_inner(); + + let data_stream = req_inner.map(|x| { + x.map(|x| VecDeque::from(x.data)) + .map_err(|e| std::io::Error::new(std::io::ErrorKind::InvalidInput, e)) + }); + + let data_reader = tokio_util::io::StreamReader::new(data_stream); + + // prepare a writer, which we'll use in the blocking task below. + let mut writer = self + .blob_service + .open_write() + .map_err(|e| Status::internal(format!("unable to open for write: {}", e)))?; + + let result = task::spawn_blocking(move || -> Result<super::PutBlobResponse, Status> { + // construct a sync reader to the data + let mut reader = tokio_util::io::SyncIoBridge::new(data_reader); + + io::copy(&mut reader, &mut writer).map_err(|e| { + warn!("error copying: {}", e); + Status::internal("error copying") + })?; + + let digest = writer + .close() + .map_err(|e| { + warn!("error closing stream: {}", e); + Status::internal("error closing stream") + })? 
+ .to_vec(); + + Ok(super::PutBlobResponse { digest }) + }) + .await + .map_err(|_| Status::internal("failed to wait for task"))??; + + Ok(Response::new(result)) + } +} diff --git a/tvix/store/src/proto/grpc_directoryservice_wrapper.rs b/tvix/store/src/proto/grpc_directoryservice_wrapper.rs new file mode 100644 index 000000000000..6d2df310137f --- /dev/null +++ b/tvix/store/src/proto/grpc_directoryservice_wrapper.rs @@ -0,0 +1,177 @@ +use crate::proto; +use crate::{directoryservice::DirectoryService, B3Digest}; +use std::collections::HashMap; +use tokio::{sync::mpsc::channel, task}; +use tokio_stream::wrappers::ReceiverStream; +use tonic::{async_trait, Request, Response, Status, Streaming}; +use tracing::{debug, instrument, warn}; + +pub struct GRPCDirectoryServiceWrapper<C: DirectoryService> { + directory_service: C, +} + +impl<DS: DirectoryService> From<DS> for GRPCDirectoryServiceWrapper<DS> { + fn from(value: DS) -> Self { + Self { + directory_service: value, + } + } +} + +#[async_trait] +impl<DS: DirectoryService + Send + Sync + Clone + 'static> + proto::directory_service_server::DirectoryService for GRPCDirectoryServiceWrapper<DS> +{ + type GetStream = ReceiverStream<tonic::Result<proto::Directory, Status>>; + + #[instrument(skip(self))] + async fn get( + &self, + request: Request<proto::GetDirectoryRequest>, + ) -> Result<Response<Self::GetStream>, Status> { + let (tx, rx) = channel(5); + + let req_inner = request.into_inner(); + + let directory_service = self.directory_service.clone(); + + let _task = { + // look at the digest in the request and put it in the top of the queue. + match &req_inner.by_what { + None => return Err(Status::invalid_argument("by_what needs to be specified")), + Some(proto::get_directory_request::ByWhat::Digest(digest)) => { + let digest = B3Digest::from_vec(digest.to_vec()) + .map_err(|_e| Status::invalid_argument("invalid digest length"))?; + + task::spawn(async move { + if !req_inner.recursive { + let e: Result<proto::Directory, Status> = + match directory_service.get(&digest) { + Ok(Some(directory)) => Ok(directory), + Ok(None) => Err(Status::not_found(format!( + "directory {} not found", + digest + ))), + Err(e) => Err(e.into()), + }; + + if tx.send(e).await.is_err() { + debug!("receiver dropped"); + } + } else { + // If recursive was requested, traverse via get_recursive. + let directories_it = directory_service.get_recursive(&digest); + + for e in directories_it { + // map err in res from Error to Status + let res = e.map_err(|e| Status::internal(e.to_string())); + if tx.send(res).await.is_err() { + debug!("receiver dropped"); + break; + } + } + } + }); + } + } + }; + + let receiver_stream = ReceiverStream::new(rx); + Ok(Response::new(receiver_stream)) + } + + #[instrument(skip(self, request))] + async fn put( + &self, + request: Request<Streaming<proto::Directory>>, + ) -> Result<Response<proto::PutDirectoryResponse>, Status> { + let mut req_inner = request.into_inner(); + // TODO: let this use DirectoryPutter to the store it's connected to, + // and move the validation logic into [SimplePutter]. + + // This keeps track of the seen directory keys, and their size. + // This is used to validate the size field of a reference to a previously sent directory. + // We don't need to keep the contents around, they're stored in the DB. + let mut seen_directories_sizes: HashMap<B3Digest, u32> = HashMap::new(); + let mut last_directory_dgst: Option<B3Digest> = None; + + // Consume directories, and insert them into the store. 
+ // Reject directory messages that refer to Directories not sent in the same stream. + while let Some(directory) = req_inner.message().await? { + // validate the directory itself. + if let Err(e) = directory.validate() { + return Err(Status::invalid_argument(format!( + "directory {} failed validation: {}", + directory.digest(), + e, + ))); + } + + // for each child directory this directory refers to, we need + // to ensure it has been seen already in this stream, and that the size + // matches what we recorded. + for child_directory in &directory.directories { + let child_directory_digest = B3Digest::from_vec(child_directory.digest.to_vec()) + .map_err(|_e| Status::internal("invalid child directory digest len"))?; + + match seen_directories_sizes.get(&child_directory_digest) { + None => { + return Err(Status::invalid_argument(format!( + "child directory '{}' ({}) in directory '{}' not seen yet", + child_directory.name, + &child_directory_digest, + &directory.digest(), + ))); + } + Some(seen_child_directory_size) => { + if seen_child_directory_size != &child_directory.size { + return Err(Status::invalid_argument(format!( + "child directory '{}' ({}) in directory '{}' referred with wrong size, expected {}, actual {}", + child_directory.name, + &child_directory_digest, + &directory.digest(), + seen_child_directory_size, + child_directory.size, + ))); + } + } + } + } + + // NOTE: We can't know if a directory we're receiving actually is + // part of the closure, because we receive directories from the leaf nodes up to + // the root. + // The only thing we could to would be doing a final check when the + // last Directory was received, that all Directories received so far are + // reachable from that (root) node. + + let dgst = directory.digest(); + seen_directories_sizes.insert(dgst.clone(), directory.size()); + last_directory_dgst = Some(dgst.clone()); + + // check if the directory already exists in the database. We can skip + // inserting if it's already there, as that'd be a no-op. + match self.directory_service.get(&dgst) { + Err(e) => { + warn!("error checking if directory already exists: {}", e); + return Err(e.into()); + } + // skip if already exists + Ok(Some(_)) => {} + // insert if it doesn't already exist + Ok(None) => { + self.directory_service.put(directory)?; + } + } + } + + // We're done receiving. peek at last_directory_digest and either return the digest, + // or an error, if we received an empty stream. 
+ match last_directory_dgst { + None => Err(Status::invalid_argument("no directories received")), + Some(last_directory_dgst) => Ok(Response::new(proto::PutDirectoryResponse { + root_digest: last_directory_dgst.to_vec(), + })), + } + } +} diff --git a/tvix/store/src/proto/grpc_pathinfoservice_wrapper.rs b/tvix/store/src/proto/grpc_pathinfoservice_wrapper.rs new file mode 100644 index 000000000000..e82557b3a06c --- /dev/null +++ b/tvix/store/src/proto/grpc_pathinfoservice_wrapper.rs @@ -0,0 +1,93 @@ +use crate::nar::RenderError; +use crate::proto; +use crate::{nar::NARCalculationService, pathinfoservice::PathInfoService}; +use tonic::{async_trait, Request, Response, Result, Status}; +use tracing::{instrument, warn}; + +pub struct GRPCPathInfoServiceWrapper<PS: PathInfoService, NS: NARCalculationService> { + path_info_service: PS, + nar_calculation_service: NS, +} + +impl<PS: PathInfoService, NS: NARCalculationService> GRPCPathInfoServiceWrapper<PS, NS> { + pub fn new(path_info_service: PS, nar_calculation_service: NS) -> Self { + Self { + path_info_service, + nar_calculation_service, + } + } +} + +#[async_trait] +impl< + PS: PathInfoService + Send + Sync + 'static, + NS: NARCalculationService + Send + Sync + 'static, + > proto::path_info_service_server::PathInfoService for GRPCPathInfoServiceWrapper<PS, NS> +{ + #[instrument(skip(self))] + async fn get( + &self, + request: Request<proto::GetPathInfoRequest>, + ) -> Result<Response<proto::PathInfo>> { + match request.into_inner().by_what { + None => Err(Status::unimplemented("by_what needs to be specified")), + Some(proto::get_path_info_request::ByWhat::ByOutputHash(digest)) => { + let digest: [u8; 20] = digest + .try_into() + .map_err(|_e| Status::invalid_argument("invalid digest length"))?; + match self.path_info_service.get(digest) { + Ok(None) => Err(Status::not_found("PathInfo not found")), + Ok(Some(path_info)) => Ok(Response::new(path_info)), + Err(e) => { + warn!("failed to retrieve PathInfo: {}", e); + Err(e.into()) + } + } + } + } + } + + #[instrument(skip(self))] + async fn put(&self, request: Request<proto::PathInfo>) -> Result<Response<proto::PathInfo>> { + let path_info = request.into_inner(); + + // Store the PathInfo in the client. Clients MUST validate the data + // they receive, so we don't validate additionally here. 
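// Illustrative client-side sketch for the `get` handler above, assuming the
// tonic-generated `proto::path_info_service_client::PathInfoServiceClient`
// module name and a connected channel; NotFound is mapped to Ok(None).
async fn lookup_path_info(
    client: &mut proto::path_info_service_client::PathInfoServiceClient<tonic::transport::Channel>,
    output_hash: [u8; 20],
) -> Result<Option<proto::PathInfo>, tonic::Status> {
    let request = proto::GetPathInfoRequest {
        by_what: Some(proto::get_path_info_request::ByWhat::ByOutputHash(
            output_hash.to_vec(),
        )),
    };
    match client.get(request).await {
        Ok(resp) => Ok(Some(resp.into_inner())),
        Err(status) if status.code() == tonic::Code::NotFound => Ok(None),
        Err(status) => Err(status),
    }
}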
+ match self.path_info_service.put(path_info) { + Ok(path_info_new) => Ok(Response::new(path_info_new)), + Err(e) => { + warn!("failed to insert PathInfo: {}", e); + Err(e.into()) + } + } + } + + #[instrument(skip(self))] + async fn calculate_nar( + &self, + request: Request<proto::Node>, + ) -> Result<Response<proto::CalculateNarResponse>> { + match request.into_inner().node { + None => Err(Status::invalid_argument("no root node sent")), + Some(root_node) => match self.nar_calculation_service.calculate_nar(&root_node) { + Ok((nar_size, nar_sha256)) => Ok(Response::new(proto::CalculateNarResponse { + nar_size, + nar_sha256: nar_sha256.to_vec(), + })), + Err(e) => Err(e.into()), + }, + } + } +} + +impl From<RenderError> for tonic::Status { + fn from(value: RenderError) -> Self { + match value { + RenderError::BlobNotFound(_, _) => Self::not_found(value.to_string()), + RenderError::DirectoryNotFound(_, _) => Self::not_found(value.to_string()), + RenderError::NARWriterError(_) => Self::internal(value.to_string()), + RenderError::StoreError(_) => Self::internal(value.to_string()), + RenderError::UnexpectedBlobMeta(_, _, _, _) => Self::internal(value.to_string()), + } + } +} diff --git a/tvix/store/src/proto/mod.rs b/tvix/store/src/proto/mod.rs new file mode 100644 index 000000000000..4db0b9731edc --- /dev/null +++ b/tvix/store/src/proto/mod.rs @@ -0,0 +1,370 @@ +#![allow(clippy::derive_partial_eq_without_eq)] +// https://github.com/hyperium/tonic/issues/1056 +use std::{collections::HashSet, iter::Peekable}; +use thiserror::Error; + +use prost::Message; + +use nix_compat::store_path::{self, StorePath}; + +mod grpc_blobservice_wrapper; +mod grpc_directoryservice_wrapper; +mod grpc_pathinfoservice_wrapper; + +mod sync_read_into_async_read; + +pub use grpc_blobservice_wrapper::GRPCBlobServiceWrapper; +pub use grpc_directoryservice_wrapper::GRPCDirectoryServiceWrapper; +pub use grpc_pathinfoservice_wrapper::GRPCPathInfoServiceWrapper; + +use crate::B3Digest; + +tonic::include_proto!("tvix.store.v1"); + +#[cfg(feature = "reflection")] +/// Compiled file descriptors for implementing [gRPC +/// reflection](https://github.com/grpc/grpc/blob/master/doc/server-reflection.md) with e.g. +/// [`tonic_reflection`](https://docs.rs/tonic-reflection). +pub const FILE_DESCRIPTOR_SET: &[u8] = tonic::include_file_descriptor_set!("tvix.store.v1"); + +#[cfg(test)] +mod tests; + +/// Errors that can occur during the validation of Directory messages. +#[derive(Debug, PartialEq, Eq, Error)] +pub enum ValidateDirectoryError { + /// Elements are not in sorted order + #[error("{0} is not sorted")] + WrongSorting(String), + /// Multiple elements with the same name encountered + #[error("{0} is a duplicate name")] + DuplicateName(String), + /// Invalid name encountered + #[error("Invalid name in {0}")] + InvalidName(String), + /// Invalid digest length encountered + #[error("Invalid Digest length: {0}")] + InvalidDigestLen(usize), +} + +/// Errors that can occur during the validation of PathInfo messages. +#[derive(Debug, Error, PartialEq)] +pub enum ValidatePathInfoError { + /// No node present + #[error("No node present")] + NoNodePresent(), + + /// Invalid node name encountered. + #[error("Failed to parse {0} as StorePath: {1}")] + InvalidNodeName(String, store_path::Error), + + /// The digest the (root) node refers to has invalid length. 
+ #[error("Invalid Digest length: {0}")] + InvalidDigestLen(usize), + + /// The number of references in the narinfo.reference_names field does not match + /// the number of references in the .references field. + #[error("Inconsistent Number of References: {0} (references) vs {0} (narinfo)")] + InconsistentNumberOfReferences(usize, usize), +} + +/// Checks a Node name for validity as an intermediate node, and returns an +/// error that's generated from the supplied constructor. +/// +/// We disallow slashes, null bytes, '.', '..' and the empty string. +fn validate_node_name<E>(name: &str, err: fn(String) -> E) -> Result<(), E> { + if name.is_empty() || name == ".." || name == "." || name.contains('\x00') || name.contains('/') + { + return Err(err(name.to_string())); + } + Ok(()) +} + +/// Checks a digest for validity. +/// Digests are 32 bytes long, as we store blake3 digests. +fn validate_digest<E>(digest: &Vec<u8>, err: fn(usize) -> E) -> Result<(), E> { + if digest.len() != 32 { + return Err(err(digest.len())); + } + Ok(()) +} + +/// Parses a root node name. +/// +/// On success, this returns the parsed [StorePath]. +/// On error, it returns an error generated from the supplied constructor. +fn parse_node_name_root<E>( + name: &str, + err: fn(String, store_path::Error) -> E, +) -> Result<StorePath, E> { + match StorePath::from_string(name) { + Ok(np) => Ok(np), + Err(e) => Err(err(name.to_string(), e)), + } +} + +impl PathInfo { + /// validate performs some checks on the PathInfo struct, + /// Returning either a [StorePath] of the root node, or a + /// [ValidatePathInfoError]. + pub fn validate(&self) -> Result<StorePath, ValidatePathInfoError> { + // If there is a narinfo field populated, ensure the number of references there + // matches PathInfo.references count. + if let Some(narinfo) = &self.narinfo { + if narinfo.reference_names.len() != self.references.len() { + return Err(ValidatePathInfoError::InconsistentNumberOfReferences( + narinfo.reference_names.len(), + self.references.len(), + )); + } + } + // FUTUREWORK: parse references in reference_names. ensure they start + // with storeDir, and use the same digest as in self.references. + + // Ensure there is a (root) node present, and it properly parses to a [StorePath]. + let root_nix_path = match &self.node { + None => { + return Err(ValidatePathInfoError::NoNodePresent()); + } + Some(Node { node }) => match node { + None => { + return Err(ValidatePathInfoError::NoNodePresent()); + } + Some(node::Node::Directory(directory_node)) => { + // ensure the digest has the appropriate size. + validate_digest( + &directory_node.digest, + ValidatePathInfoError::InvalidDigestLen, + )?; + + // parse the name + parse_node_name_root( + &directory_node.name, + ValidatePathInfoError::InvalidNodeName, + )? + } + Some(node::Node::File(file_node)) => { + // ensure the digest has the appropriate size. + validate_digest(&file_node.digest, ValidatePathInfoError::InvalidDigestLen)?; + + // parse the name + parse_node_name_root(&file_node.name, ValidatePathInfoError::InvalidNodeName)? + } + Some(node::Node::Symlink(symlink_node)) => { + // parse the name + parse_node_name_root( + &symlink_node.name, + ValidatePathInfoError::InvalidNodeName, + )? + } + }, + }; + + // return the root nix path + Ok(root_nix_path) + } +} + +/// NamedNode is implemented for [FileNode], [DirectoryNode] and [SymlinkNode] +/// and [node::Node], so we can ask all of them for the name easily. 
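// Illustrative sketch of the happy path of PathInfo::validate defined above: a
// root symlink node whose name parses as a store path. The concrete name and
// target are just example values.
fn demo_validate_path_info() {
    let path_info = PathInfo {
        node: Some(Node {
            node: Some(node::Node::Symlink(SymlinkNode {
                name: "00000000000000000000000000000000-foo".to_string(),
                target: "/somewhere/else".to_string(),
            })),
        }),
        ..Default::default()
    };
    // validate() returns the parsed StorePath of the root node on success.
    assert!(path_info.validate().is_ok());
}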
+pub trait NamedNode { + fn get_name(&self) -> &str; +} + +impl NamedNode for &FileNode { + fn get_name(&self) -> &str { + self.name.as_str() + } +} + +impl NamedNode for &DirectoryNode { + fn get_name(&self) -> &str { + self.name.as_str() + } +} + +impl NamedNode for &SymlinkNode { + fn get_name(&self) -> &str { + self.name.as_str() + } +} + +impl NamedNode for node::Node { + fn get_name(&self) -> &str { + match self { + node::Node::File(node_file) => &node_file.name, + node::Node::Directory(node_directory) => &node_directory.name, + node::Node::Symlink(node_symlink) => &node_symlink.name, + } + } +} + +/// Accepts a name, and a mutable reference to the previous name. +/// If the passed name is larger than the previous one, the reference is updated. +/// If it's not, an error is returned. +fn update_if_lt_prev<'n>( + prev_name: &mut &'n str, + name: &'n str, +) -> Result<(), ValidateDirectoryError> { + if *name < **prev_name { + return Err(ValidateDirectoryError::WrongSorting(name.to_string())); + } + *prev_name = name; + Ok(()) +} + +/// Inserts the given name into a HashSet if it's not already in there. +/// If it is, an error is returned. +fn insert_once<'n>( + seen_names: &mut HashSet<&'n str>, + name: &'n str, +) -> Result<(), ValidateDirectoryError> { + if seen_names.get(name).is_some() { + return Err(ValidateDirectoryError::DuplicateName(name.to_string())); + } + seen_names.insert(name); + Ok(()) +} + +impl Directory { + /// The size of a directory is the number of all regular and symlink elements, + /// the number of directory elements, and their size fields. + pub fn size(&self) -> u32 { + self.files.len() as u32 + + self.symlinks.len() as u32 + + self + .directories + .iter() + .fold(0, |acc: u32, e| (acc + 1 + e.size)) + } + + /// Calculates the digest of a Directory, which is the blake3 hash of a + /// Directory protobuf message, serialized in protobuf canonical form. 
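// Worked example for Directory::size() defined below: a Directory with one
// file, one symlink and one subdirectory whose own size field is 4 reports
// 1 + 1 + (1 + 4) = 7.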
+ pub fn digest(&self) -> B3Digest { + let mut hasher = blake3::Hasher::new(); + + let vec = hasher + .update(&self.encode_to_vec()) + .finalize() + .as_bytes() + .to_vec(); + B3Digest::from_vec(vec).unwrap() + } + + /// validate checks the directory for invalid data, such as: + /// - violations of name restrictions + /// - invalid digest lengths + /// - not properly sorted lists + /// - duplicate names in the three lists + pub fn validate(&self) -> Result<(), ValidateDirectoryError> { + let mut seen_names: HashSet<&str> = HashSet::new(); + + let mut last_directory_name: &str = ""; + let mut last_file_name: &str = ""; + let mut last_symlink_name: &str = ""; + + // check directories + for directory_node in &self.directories { + validate_node_name(&directory_node.name, ValidateDirectoryError::InvalidName)?; + validate_digest( + &directory_node.digest, + ValidateDirectoryError::InvalidDigestLen, + )?; + + update_if_lt_prev(&mut last_directory_name, directory_node.name.as_str())?; + insert_once(&mut seen_names, directory_node.name.as_str())?; + } + + // check files + for file_node in &self.files { + validate_node_name(&file_node.name, ValidateDirectoryError::InvalidName)?; + validate_digest(&file_node.digest, ValidateDirectoryError::InvalidDigestLen)?; + + update_if_lt_prev(&mut last_file_name, file_node.name.as_str())?; + insert_once(&mut seen_names, file_node.name.as_str())?; + } + + // check symlinks + for symlink_node in &self.symlinks { + validate_node_name(&symlink_node.name, ValidateDirectoryError::InvalidName)?; + + update_if_lt_prev(&mut last_symlink_name, symlink_node.name.as_str())?; + insert_once(&mut seen_names, symlink_node.name.as_str())?; + } + + Ok(()) + } + + /// Allows iterating over all three nodes ([DirectoryNode], [FileNode], + /// [SymlinkNode]) in an ordered fashion, as long as the individual lists + /// are sorted (which can be checked by the [Directory::validate]). + pub fn nodes(&self) -> DirectoryNodesIterator { + return DirectoryNodesIterator { + i_directories: self.directories.iter().peekable(), + i_files: self.files.iter().peekable(), + i_symlinks: self.symlinks.iter().peekable(), + }; + } +} + +/// Struct to hold the state of an iterator over all nodes of a Directory. +/// +/// Internally, this keeps peekable Iterators over all three lists of a +/// Directory message. +pub struct DirectoryNodesIterator<'a> { + // directory: &Directory, + i_directories: Peekable<std::slice::Iter<'a, DirectoryNode>>, + i_files: Peekable<std::slice::Iter<'a, FileNode>>, + i_symlinks: Peekable<std::slice::Iter<'a, SymlinkNode>>, +} + +/// looks at two elements implementing NamedNode, and returns true if "left +/// is smaller / comes first". +/// +/// Some(_) is preferred over None. +fn left_name_lt_right<A: NamedNode, B: NamedNode>(left: Option<&A>, right: Option<&B>) -> bool { + match left { + // if left is None, right always wins + None => false, + Some(left_inner) => { + // left is Some. + match right { + // left is Some, right is None - left wins. + None => true, + Some(right_inner) => { + // both are Some - compare the name. + return left_inner.get_name() < right_inner.get_name(); + } + } + } + } +} + +impl Iterator for DirectoryNodesIterator<'_> { + type Item = node::Node; + + // next returns the next node in the Directory. + // we peek at all three internal iterators, and pick the one with the + // smallest name, to ensure lexicographical ordering. + // The individual lists are already known to be sorted. 
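// Illustrative sketch: collecting the merged, lexicographically ordered node
// names of a (validated) Directory via the nodes() iterator above, using the
// NamedNode trait from this module.
fn node_names(d: &Directory) -> Vec<String> {
    d.nodes().map(|n| n.get_name().to_string()).collect()
}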
+ fn next(&mut self) -> Option<Self::Item> { + if left_name_lt_right(self.i_directories.peek(), self.i_files.peek()) { + // i_directories is still in the game, compare with symlinks + if left_name_lt_right(self.i_directories.peek(), self.i_symlinks.peek()) { + self.i_directories + .next() + .cloned() + .map(node::Node::Directory) + } else { + self.i_symlinks.next().cloned().map(node::Node::Symlink) + } + } else { + // i_files is still in the game, compare with symlinks + if left_name_lt_right(self.i_files.peek(), self.i_symlinks.peek()) { + self.i_files.next().cloned().map(node::Node::File) + } else { + self.i_symlinks.next().cloned().map(node::Node::Symlink) + } + } + } +} diff --git a/tvix/store/src/proto/sync_read_into_async_read.rs b/tvix/store/src/proto/sync_read_into_async_read.rs new file mode 100644 index 000000000000..0a0ef019781c --- /dev/null +++ b/tvix/store/src/proto/sync_read_into_async_read.rs @@ -0,0 +1,158 @@ +use bytes::Buf; +use core::task::Poll::Ready; +use futures::ready; +use futures::Future; +use std::io; +use std::io::Read; +use std::pin::Pin; +use std::sync::Arc; +use std::task::Context; +use std::task::Poll; +use tokio::io::AsyncRead; +use tokio::runtime::Handle; +use tokio::sync::Mutex; +use tokio::task::JoinHandle; + +#[derive(Debug)] +enum State<Buf: bytes::Buf + bytes::BufMut> { + Idle(Option<Buf>), + Busy(JoinHandle<(io::Result<usize>, Buf)>), +} + +use State::{Busy, Idle}; + +/// Use a [`SyncReadIntoAsyncRead`] to asynchronously read from a +/// synchronous API. +#[derive(Debug)] +pub struct SyncReadIntoAsyncRead<R: Read + Send, Buf: bytes::Buf + bytes::BufMut> { + state: Mutex<State<Buf>>, + reader: Arc<Mutex<R>>, + rt: Handle, +} + +impl<R: Read + Send, Buf: bytes::Buf + bytes::BufMut> SyncReadIntoAsyncRead<R, Buf> { + /// This must be called from within a Tokio runtime context, or else it will panic. + #[track_caller] + pub fn new(rt: Handle, reader: R) -> Self { + Self { + rt, + state: State::Idle(None).into(), + reader: Arc::new(reader.into()), + } + } + + /// This must be called from within a Tokio runtime context, or else it will panic. + pub fn new_with_reader(readable: R) -> Self { + Self::new(Handle::current(), readable) + } +} + +/// Repeats operations that are interrupted. +macro_rules! uninterruptibly { + ($e:expr) => {{ + loop { + match $e { + Err(ref e) if e.kind() == io::ErrorKind::Interrupted => {} + res => break res, + } + } + }}; +} + +impl< + R: Read + Send + 'static + std::marker::Unpin, + Buf: bytes::Buf + bytes::BufMut + Send + Default + std::marker::Unpin + 'static, + > AsyncRead for SyncReadIntoAsyncRead<R, Buf> +{ + fn poll_read( + self: Pin<&mut Self>, + cx: &mut Context<'_>, + dst: &mut tokio::io::ReadBuf<'_>, + ) -> Poll<io::Result<()>> { + let me = self.get_mut(); + // Do we need this mutex? + let state = me.state.get_mut(); + + loop { + match state { + Idle(ref mut buf_cell) => { + let mut buf = buf_cell.take().unwrap_or_default(); + + if buf.has_remaining() { + // Here, we will split the `buf` into `[..dst.remaining()... ; rest ]` + // The `rest` is stuffed into the `buf_cell` for further poll_read. + // The other is completely consumed into the unfilled destination. + // `rest` can be empty. 
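// Illustrative sketch: wrapping a synchronous reader so it can be consumed via
// tokio's AsyncReadExt. This relies on the From<R> impl in this file and must
// run inside a Tokio runtime, since the conversion captures Handle::current().
async fn demo_sync_into_async() -> io::Result<Vec<u8>> {
    use tokio::io::AsyncReadExt;

    let sync_reader = std::io::Cursor::new(b"hello".to_vec());
    let mut async_reader: SyncReadIntoAsyncRead<_, bytes::BytesMut> = sync_reader.into();

    let mut out = Vec::new();
    async_reader.read_to_end(&mut out).await?;
    Ok(out)
}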
+ let mut adjusted_src = + buf.copy_to_bytes(std::cmp::min(buf.remaining(), dst.remaining())); + let copied_size = adjusted_src.remaining(); + adjusted_src.copy_to_slice(dst.initialize_unfilled_to(copied_size)); + dst.set_filled(copied_size); + *buf_cell = Some(buf); + return Ready(Ok(())); + } + + let reader = me.reader.clone(); + *state = Busy(me.rt.spawn_blocking(move || { + let result = uninterruptibly!(reader.blocking_lock().read( + // SAFETY: `reader.read` will *ONLY* write initialized bytes + // and never *READ* uninitialized bytes + // inside this buffer. + // + // Furthermore, casting the slice as `*mut [u8]` + // is safe because it has the same layout. + // + // Finally, the pointer obtained is valid and owned + // by `buf` only as we have a valid mutable reference + // to it, it is valid for write. + // + // Here, we copy an nightly API: https://doc.rust-lang.org/stable/src/core/mem/maybe_uninit.rs.html#994-998 + unsafe { + &mut *(buf.chunk_mut().as_uninit_slice_mut() + as *mut [std::mem::MaybeUninit<u8>] + as *mut [u8]) + } + )); + + if let Ok(n) = result { + // SAFETY: given we initialize `n` bytes, we can move `n` bytes + // forward. + unsafe { + buf.advance_mut(n); + } + } + + (result, buf) + })); + } + Busy(ref mut rx) => { + let (result, mut buf) = ready!(Pin::new(rx).poll(cx))?; + + match result { + Ok(n) => { + if n > 0 { + let remaining = std::cmp::min(n, dst.remaining()); + let mut adjusted_src = buf.copy_to_bytes(remaining); + adjusted_src.copy_to_slice(dst.initialize_unfilled_to(remaining)); + dst.advance(remaining); + } + *state = Idle(Some(buf)); + return Ready(Ok(())); + } + Err(e) => { + *state = Idle(None); + return Ready(Err(e)); + } + } + } + } + } + } +} + +impl<R: Read + Send, Buf: bytes::Buf + bytes::BufMut> From<R> for SyncReadIntoAsyncRead<R, Buf> { + /// This must be called from within a Tokio runtime context, or else it will panic. + fn from(value: R) -> Self { + Self::new_with_reader(value) + } +} diff --git a/tvix/store/src/proto/tests/directory.rs b/tvix/store/src/proto/tests/directory.rs new file mode 100644 index 000000000000..8d6ca7241d7a --- /dev/null +++ b/tvix/store/src/proto/tests/directory.rs @@ -0,0 +1,289 @@ +use crate::{ + proto::{Directory, DirectoryNode, FileNode, SymlinkNode, ValidateDirectoryError}, + B3Digest, +}; +use lazy_static::lazy_static; + +lazy_static! 
{ + static ref DUMMY_DIGEST: [u8; 32] = [ + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, + ]; +} +#[test] +fn size() { + { + let d = Directory::default(); + assert_eq!(d.size(), 0); + } + { + let d = Directory { + directories: vec![DirectoryNode { + name: String::from("foo"), + digest: DUMMY_DIGEST.to_vec(), + size: 0, + }], + ..Default::default() + }; + assert_eq!(d.size(), 1); + } + { + let d = Directory { + directories: vec![DirectoryNode { + name: String::from("foo"), + digest: DUMMY_DIGEST.to_vec(), + size: 4, + }], + ..Default::default() + }; + assert_eq!(d.size(), 5); + } + { + let d = Directory { + files: vec![FileNode { + name: String::from("foo"), + digest: DUMMY_DIGEST.to_vec(), + size: 42, + executable: false, + }], + ..Default::default() + }; + assert_eq!(d.size(), 1); + } + { + let d = Directory { + symlinks: vec![SymlinkNode { + name: String::from("foo"), + target: String::from("bar"), + }], + ..Default::default() + }; + assert_eq!(d.size(), 1); + } +} + +#[test] +fn digest() { + let d = Directory::default(); + + assert_eq!( + d.digest(), + B3Digest::from_vec(vec![ + 0xaf, 0x13, 0x49, 0xb9, 0xf5, 0xf9, 0xa1, 0xa6, 0xa0, 0x40, 0x4d, 0xea, 0x36, 0xdc, + 0xc9, 0x49, 0x9b, 0xcb, 0x25, 0xc9, 0xad, 0xc1, 0x12, 0xb7, 0xcc, 0x9a, 0x93, 0xca, + 0xe4, 0x1f, 0x32, 0x62 + ]) + .unwrap() + ) +} + +#[test] +fn validate_empty() { + let d = Directory::default(); + assert_eq!(d.validate(), Ok(())); +} + +#[test] +fn validate_invalid_names() { + { + let d = Directory { + directories: vec![DirectoryNode { + name: "".to_string(), + digest: DUMMY_DIGEST.to_vec(), + size: 42, + }], + ..Default::default() + }; + match d.validate().expect_err("must fail") { + ValidateDirectoryError::InvalidName(n) => { + assert_eq!(n, "") + } + _ => panic!("unexpected error"), + }; + } + + { + let d = Directory { + directories: vec![DirectoryNode { + name: ".".to_string(), + digest: DUMMY_DIGEST.to_vec(), + size: 42, + }], + ..Default::default() + }; + match d.validate().expect_err("must fail") { + ValidateDirectoryError::InvalidName(n) => { + assert_eq!(n, ".") + } + _ => panic!("unexpected error"), + }; + } + + { + let d = Directory { + files: vec![FileNode { + name: "..".to_string(), + digest: DUMMY_DIGEST.to_vec(), + size: 42, + executable: false, + }], + ..Default::default() + }; + match d.validate().expect_err("must fail") { + ValidateDirectoryError::InvalidName(n) => { + assert_eq!(n, "..") + } + _ => panic!("unexpected error"), + }; + } + + { + let d = Directory { + symlinks: vec![SymlinkNode { + name: "\x00".to_string(), + target: "foo".to_string(), + }], + ..Default::default() + }; + match d.validate().expect_err("must fail") { + ValidateDirectoryError::InvalidName(n) => { + assert_eq!(n, "\x00") + } + _ => panic!("unexpected error"), + }; + } + + { + let d = Directory { + symlinks: vec![SymlinkNode { + name: "foo/bar".to_string(), + target: "foo".to_string(), + }], + ..Default::default() + }; + match d.validate().expect_err("must fail") { + ValidateDirectoryError::InvalidName(n) => { + assert_eq!(n, "foo/bar") + } + _ => panic!("unexpected error"), + }; + } +} + +#[test] +fn validate_invalid_digest() { + let d = Directory { + directories: vec![DirectoryNode { + name: "foo".to_string(), + digest: vec![0x00, 0x42], // invalid length + size: 42, + }], + ..Default::default() + }; + match d.validate().expect_err("must fail") { + 
ValidateDirectoryError::InvalidDigestLen(n) => { + assert_eq!(n, 2) + } + _ => panic!("unexpected error"), + } +} + +#[test] +fn validate_sorting() { + // "b" comes before "a", bad. + { + let d = Directory { + directories: vec![ + DirectoryNode { + name: "b".to_string(), + digest: DUMMY_DIGEST.to_vec(), + size: 42, + }, + DirectoryNode { + name: "a".to_string(), + digest: DUMMY_DIGEST.to_vec(), + size: 42, + }, + ], + ..Default::default() + }; + match d.validate().expect_err("must fail") { + ValidateDirectoryError::WrongSorting(s) => { + assert_eq!(s, "a".to_string()); + } + _ => panic!("unexpected error"), + } + } + + // "a" exists twice, bad. + { + let d = Directory { + directories: vec![ + DirectoryNode { + name: "a".to_string(), + digest: DUMMY_DIGEST.to_vec(), + size: 42, + }, + DirectoryNode { + name: "a".to_string(), + digest: DUMMY_DIGEST.to_vec(), + size: 42, + }, + ], + ..Default::default() + }; + match d.validate().expect_err("must fail") { + ValidateDirectoryError::DuplicateName(s) => { + assert_eq!(s, "a".to_string()); + } + _ => panic!("unexpected error"), + } + } + + // "a" comes before "b", all good. + { + let d = Directory { + directories: vec![ + DirectoryNode { + name: "a".to_string(), + digest: DUMMY_DIGEST.to_vec(), + size: 42, + }, + DirectoryNode { + name: "b".to_string(), + digest: DUMMY_DIGEST.to_vec(), + size: 42, + }, + ], + ..Default::default() + }; + + d.validate().expect("validate shouldn't error"); + } + + // [b, c] and [a] are both properly sorted. + { + let d = Directory { + directories: vec![ + DirectoryNode { + name: "b".to_string(), + digest: DUMMY_DIGEST.to_vec(), + size: 42, + }, + DirectoryNode { + name: "c".to_string(), + digest: DUMMY_DIGEST.to_vec(), + size: 42, + }, + ], + symlinks: vec![SymlinkNode { + name: "a".to_string(), + target: "foo".to_string(), + }], + ..Default::default() + }; + + d.validate().expect("validate shouldn't error"); + } +} diff --git a/tvix/store/src/proto/tests/directory_nodes_iterator.rs b/tvix/store/src/proto/tests/directory_nodes_iterator.rs new file mode 100644 index 000000000000..9a283f72bd45 --- /dev/null +++ b/tvix/store/src/proto/tests/directory_nodes_iterator.rs @@ -0,0 +1,80 @@ +use crate::proto::node::Node; +use crate::proto::Directory; +use crate::proto::DirectoryNode; +use crate::proto::FileNode; +use crate::proto::SymlinkNode; + +#[test] +fn iterator() { + let d = Directory { + directories: vec![ + DirectoryNode { + name: "c".to_string(), + ..DirectoryNode::default() + }, + DirectoryNode { + name: "d".to_string(), + ..DirectoryNode::default() + }, + DirectoryNode { + name: "h".to_string(), + ..DirectoryNode::default() + }, + DirectoryNode { + name: "l".to_string(), + ..DirectoryNode::default() + }, + ], + files: vec![ + FileNode { + name: "b".to_string(), + ..FileNode::default() + }, + FileNode { + name: "e".to_string(), + ..FileNode::default() + }, + FileNode { + name: "g".to_string(), + ..FileNode::default() + }, + FileNode { + name: "j".to_string(), + ..FileNode::default() + }, + ], + symlinks: vec![ + SymlinkNode { + name: "a".to_string(), + ..SymlinkNode::default() + }, + SymlinkNode { + name: "f".to_string(), + ..SymlinkNode::default() + }, + SymlinkNode { + name: "i".to_string(), + ..SymlinkNode::default() + }, + SymlinkNode { + name: "k".to_string(), + ..SymlinkNode::default() + }, + ], + }; + + let mut node_names: Vec<String> = vec![]; + + for node in d.nodes() { + match node { + Node::Directory(n) => node_names.push(n.name), + Node::File(n) => node_names.push(n.name), + Node::Symlink(n) => 
node_names.push(n.name), + }; + } + + assert_eq!( + vec!["a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k", "l"], + node_names + ); +} diff --git a/tvix/store/src/proto/tests/grpc_blobservice.rs b/tvix/store/src/proto/tests/grpc_blobservice.rs new file mode 100644 index 000000000000..02e04e7d723f --- /dev/null +++ b/tvix/store/src/proto/tests/grpc_blobservice.rs @@ -0,0 +1,102 @@ +use crate::blobservice::BlobService; +use crate::proto::blob_service_server::BlobService as GRPCBlobService; +use crate::proto::{BlobChunk, GRPCBlobServiceWrapper, ReadBlobRequest, StatBlobRequest}; +use crate::tests::fixtures::{BLOB_A, BLOB_A_DIGEST}; +use crate::tests::utils::gen_blob_service; +use tokio_stream::StreamExt; + +fn gen_grpc_blob_service( +) -> GRPCBlobServiceWrapper<impl BlobService + Send + Sync + Clone + 'static> { + let blob_service = gen_blob_service(); + GRPCBlobServiceWrapper::from(blob_service) +} + +/// Trying to read a non-existent blob should return a not found error. +#[tokio::test] +async fn not_found_read() { + let service = gen_grpc_blob_service(); + + let resp = service + .read(tonic::Request::new(ReadBlobRequest { + digest: BLOB_A_DIGEST.to_vec(), + })) + .await; + + // We can't use unwrap_err here, because the Ok value doesn't implement + // debug. + if let Err(e) = resp { + assert_eq!(e.code(), tonic::Code::NotFound); + } else { + panic!("resp is not err") + } +} + +/// Trying to stat a non-existent blob should return a not found error. +#[tokio::test] +async fn not_found_stat() { + let service = gen_grpc_blob_service(); + + let resp = service + .stat(tonic::Request::new(StatBlobRequest { + digest: BLOB_A_DIGEST.to_vec(), + ..Default::default() + })) + .await + .expect_err("must fail"); + + // The resp should be a status with Code::NotFound + assert_eq!(resp.code(), tonic::Code::NotFound); +} + +/// Put a blob in the store, get it back. +#[tokio::test] +async fn put_read_stat() { + let service = gen_grpc_blob_service(); + + // Send blob A. + let put_resp = service + .put(tonic_mock::streaming_request(vec![BlobChunk { + data: BLOB_A.clone(), + }])) + .await + .expect("must succeed") + .into_inner(); + + assert_eq!(BLOB_A_DIGEST.to_vec(), put_resp.digest); + + // Stat for the digest of A. + // We currently don't ask for more granular chunking data, as we don't + // expose it yet. + let _resp = service + .stat(tonic::Request::new(StatBlobRequest { + digest: BLOB_A_DIGEST.to_vec(), + ..Default::default() + })) + .await + .expect("must succeed") + .into_inner(); + + // Read the blob. It should return the same data. + let resp = service + .read(tonic::Request::new(ReadBlobRequest { + digest: BLOB_A_DIGEST.to_vec(), + })) + .await; + + let mut rx = resp.ok().unwrap().into_inner(); + + // the stream should contain one element, a BlobChunk with the same contents as BLOB_A. + let item = rx + .next() + .await + .expect("must be some") + .expect("must succeed"); + + assert_eq!(BLOB_A.to_vec(), item.data); + + // … and no more elements + assert!(rx.next().await.is_none()); + + // TODO: we rely here on the blob being small enough to not get broken up into multiple chunks. 
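// Illustrative sketch of the "bigger blob" variant hinted at in the TODO above:
// upload BLOB_B (1MB) in two chunks and reassemble however many chunks the
// read stream yields. Assumes BLOB_B / BLOB_B_DIGEST are imported from
// crate::tests::fixtures alongside the BLOB_A fixtures.
#[tokio::test]
async fn put_read_big() {
    let service = gen_grpc_blob_service();

    // Send BLOB_B split into two BlobChunks.
    let (first, second) = BLOB_B.split_at(BLOB_B.len() / 2);
    let put_resp = service
        .put(tonic_mock::streaming_request(vec![
            BlobChunk {
                data: first.to_vec(),
            },
            BlobChunk {
                data: second.to_vec(),
            },
        ]))
        .await
        .expect("must succeed")
        .into_inner();

    assert_eq!(BLOB_B_DIGEST.to_vec(), put_resp.digest);

    // Read it back. The blob may come back split into multiple BlobChunks;
    // concatenate whatever arrives.
    let mut rx = service
        .read(tonic::Request::new(ReadBlobRequest {
            digest: BLOB_B_DIGEST.to_vec(),
        }))
        .await
        .expect("must succeed")
        .into_inner();

    let mut data: Vec<u8> = Vec::new();
    while let Some(chunk) = rx.next().await {
        data.extend_from_slice(&chunk.expect("must succeed").data);
    }

    assert_eq!(BLOB_B.to_vec(), data);
}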
+ // Test with some bigger blob too +} diff --git a/tvix/store/src/proto/tests/grpc_directoryservice.rs b/tvix/store/src/proto/tests/grpc_directoryservice.rs new file mode 100644 index 000000000000..069e82f6463e --- /dev/null +++ b/tvix/store/src/proto/tests/grpc_directoryservice.rs @@ -0,0 +1,241 @@ +use crate::directoryservice::DirectoryService; +use crate::proto::directory_service_server::DirectoryService as GRPCDirectoryService; +use crate::proto::get_directory_request::ByWhat; +use crate::proto::{Directory, DirectoryNode, SymlinkNode}; +use crate::proto::{GRPCDirectoryServiceWrapper, GetDirectoryRequest}; +use crate::tests::fixtures::{DIRECTORY_A, DIRECTORY_B, DIRECTORY_C}; +use crate::tests::utils::gen_directory_service; +use tokio_stream::StreamExt; +use tonic::Status; + +fn gen_grpc_service( +) -> GRPCDirectoryServiceWrapper<impl DirectoryService + Send + Sync + Clone + 'static> { + let directory_service = gen_directory_service(); + GRPCDirectoryServiceWrapper::from(directory_service) +} + +/// Send the specified GetDirectoryRequest. +/// Returns an error in the case of an error response, or an error in one of +// the items in the stream, or a Vec<Directory> in the case of a successful +/// request. +async fn get_directories<S: GRPCDirectoryService>( + svc: &S, + get_directory_request: GetDirectoryRequest, +) -> Result<Vec<Directory>, Status> { + let resp = svc.get(tonic::Request::new(get_directory_request)).await; + + // if the response is an error itself, return the error, otherwise unpack + let stream = match resp { + Ok(resp) => resp, + Err(status) => return Err(status), + } + .into_inner(); + + let directory_results: Vec<Result<Directory, Status>> = stream.collect().await; + + // turn Vec<Result<Directory, Status> into Result<Vec<Directory>,Status> + directory_results.into_iter().collect() +} + +/// Trying to get a non-existent Directory should return a not found error. +#[tokio::test] +async fn not_found() { + let service = gen_grpc_service(); + + let resp = service + .get(tonic::Request::new(GetDirectoryRequest { + by_what: Some(ByWhat::Digest(DIRECTORY_A.digest().to_vec())), + ..Default::default() + })) + .await; + + let mut rx = resp.expect("must succeed").into_inner().into_inner(); + + // The stream should contain one element, an error with Code::NotFound. + let item = rx + .recv() + .await + .expect("must be some") + .expect_err("must be err"); + assert_eq!(item.code(), tonic::Code::NotFound); + + // … and nothing else + assert!(rx.recv().await.is_none()); +} + +/// Put a Directory into the store, get it back. +#[tokio::test] +async fn put_get() { + let service = gen_grpc_service(); + + let streaming_request = tonic_mock::streaming_request(vec![DIRECTORY_A.clone()]); + let put_resp = service + .put(streaming_request) + .await + .expect("must succeed") + .into_inner(); + + // the sent root_digest should match the calculated digest + assert_eq!(put_resp.root_digest, DIRECTORY_A.digest().to_vec()); + + // get it back + let items = get_directories( + &service, + GetDirectoryRequest { + by_what: Some(ByWhat::Digest(DIRECTORY_A.digest().to_vec())), + ..Default::default() + }, + ) + .await + .expect("must not error"); + + assert_eq!(vec![DIRECTORY_A.clone()], items); +} + +/// Put multiple Directories into the store, and get them back +#[tokio::test] +async fn put_get_multiple() { + let service = gen_grpc_service(); + + // sending "b" (which refers to "a") without sending "a" first should fail. 
+ let put_resp = service + .put(tonic_mock::streaming_request(vec![DIRECTORY_B.clone()])) + .await + .expect_err("must fail"); + + assert_eq!(tonic::Code::InvalidArgument, put_resp.code()); + + // sending "a", then "b" should succeed, and the response should contain the digest of b. + let put_resp = service + .put(tonic_mock::streaming_request(vec![ + DIRECTORY_A.clone(), + DIRECTORY_B.clone(), + ])) + .await + .expect("must succeed"); + + assert_eq!( + DIRECTORY_B.digest().to_vec(), + put_resp.into_inner().root_digest + ); + + // now, request b, first in non-recursive mode. + let items = get_directories( + &service, + GetDirectoryRequest { + recursive: false, + by_what: Some(ByWhat::Digest(DIRECTORY_B.digest().to_vec())), + }, + ) + .await + .expect("must not error"); + + // We expect to only get b. + assert_eq!(vec![DIRECTORY_B.clone()], items); + + // now, request b, but in recursive mode. + let items = get_directories( + &service, + GetDirectoryRequest { + recursive: true, + by_what: Some(ByWhat::Digest(DIRECTORY_B.digest().to_vec())), + }, + ) + .await + .expect("must not error"); + + // We expect to get b, and then a, because that's how we traverse down. + assert_eq!(vec![DIRECTORY_B.clone(), DIRECTORY_A.clone()], items); +} + +/// Put multiple Directories into the store, and omit duplicates. +#[tokio::test] +async fn put_get_dedup() { + let service = gen_grpc_service(); + + // Send "A", then "C", which refers to "A" two times + // Pretend we're a dumb client sending A twice. + let put_resp = service + .put(tonic_mock::streaming_request(vec![ + DIRECTORY_A.clone(), + DIRECTORY_A.clone(), + DIRECTORY_C.clone(), + ])) + .await + .expect("must succeed"); + + assert_eq!( + DIRECTORY_C.digest().to_vec(), + put_resp.into_inner().root_digest + ); + + // Ask for "C" recursively. We expect to only get "A" once, as there's no point sending it twice. + let items = get_directories( + &service, + GetDirectoryRequest { + recursive: true, + by_what: Some(ByWhat::Digest(DIRECTORY_C.digest().to_vec())), + }, + ) + .await + .expect("must not error"); + + // We expect to get C, and then A (once, as the second A has been deduplicated). + assert_eq!(vec![DIRECTORY_C.clone(), DIRECTORY_A.clone()], items); +} + +/// Trying to upload a Directory failing validation should fail. +#[tokio::test] +async fn put_reject_failed_validation() { + let service = gen_grpc_service(); + + // construct a broken Directory message that fails validation + let broken_directory = Directory { + symlinks: vec![SymlinkNode { + name: "".to_string(), + target: "doesntmatter".to_string(), + }], + ..Default::default() + }; + assert!(broken_directory.validate().is_err()); + + // send it over, it must fail + let put_resp = service + .put(tonic_mock::streaming_request(vec![broken_directory])) + .await + .expect_err("must fail"); + + assert_eq!(put_resp.code(), tonic::Code::InvalidArgument); +} + +/// Trying to upload a Directory with wrong size should fail. +#[tokio::test] +async fn put_reject_wrong_size() { + let service = gen_grpc_service(); + + // Construct a directory referring to DIRECTORY_A, but with wrong size. + let broken_parent_directory = Directory { + directories: vec![DirectoryNode { + name: "foo".to_string(), + digest: DIRECTORY_A.digest().to_vec(), + size: 42, + }], + ..Default::default() + }; + // Make sure we got the size wrong. + assert_ne!( + broken_parent_directory.directories[0].size, + DIRECTORY_A.size() + ); + + // now upload both (first A, then the broken parent). This must fail. 
+ let put_resp = service + .put(tonic_mock::streaming_request(vec![ + DIRECTORY_A.clone(), + broken_parent_directory, + ])) + .await + .expect_err("must fail"); + + assert_eq!(put_resp.code(), tonic::Code::InvalidArgument); +} diff --git a/tvix/store/src/proto/tests/grpc_pathinfoservice.rs b/tvix/store/src/proto/tests/grpc_pathinfoservice.rs new file mode 100644 index 000000000000..11cab2c264cc --- /dev/null +++ b/tvix/store/src/proto/tests/grpc_pathinfoservice.rs @@ -0,0 +1,67 @@ +use crate::nar::NonCachingNARCalculationService; +use crate::proto::get_path_info_request::ByWhat::ByOutputHash; +use crate::proto::node::Node::Symlink; +use crate::proto::path_info_service_server::PathInfoService as GRPCPathInfoService; +use crate::proto::GRPCPathInfoServiceWrapper; +use crate::proto::PathInfo; +use crate::proto::{GetPathInfoRequest, Node, SymlinkNode}; +use crate::tests::fixtures::DUMMY_OUTPUT_HASH; +use crate::tests::utils::{gen_blob_service, gen_directory_service, gen_pathinfo_service}; +use tonic::Request; + +/// generates a GRPCPathInfoService out of blob, directory and pathinfo services. +/// +/// We only interact with it via the PathInfo GRPC interface. +/// It uses the NonCachingNARCalculationService NARCalculationService to +/// calculate NARs. +fn gen_grpc_service() -> impl GRPCPathInfoService { + GRPCPathInfoServiceWrapper::new( + gen_pathinfo_service(), + NonCachingNARCalculationService::new(gen_blob_service(), gen_directory_service()), + ) +} + +/// Trying to get a non-existent PathInfo should return a not found error. +#[tokio::test] +async fn not_found() { + let service = gen_grpc_service(); + + let resp = service + .get(Request::new(GetPathInfoRequest { + by_what: Some(ByOutputHash(DUMMY_OUTPUT_HASH.to_vec())), + })) + .await; + + let resp = resp.expect_err("must fail"); + assert_eq!(resp.code(), tonic::Code::NotFound); +} + +/// Put a PathInfo into the store, get it back. +#[tokio::test] +async fn put_get() { + let service = gen_grpc_service(); + + let path_info = PathInfo { + node: Some(Node { + node: Some(Symlink(SymlinkNode { + name: "00000000000000000000000000000000-foo".to_string(), + target: "doesntmatter".to_string(), + })), + }), + ..Default::default() + }; + + let resp = service.put(Request::new(path_info.clone())).await; + + assert!(resp.is_ok()); + assert_eq!(resp.expect("must succeed").into_inner(), path_info); + + let resp = service + .get(Request::new(GetPathInfoRequest { + by_what: Some(ByOutputHash(DUMMY_OUTPUT_HASH.to_vec())), + })) + .await; + + assert!(resp.is_ok()); + assert_eq!(resp.expect("must succeed").into_inner(), path_info); +} diff --git a/tvix/store/src/proto/tests/mod.rs b/tvix/store/src/proto/tests/mod.rs new file mode 100644 index 000000000000..0a96ea3a0d59 --- /dev/null +++ b/tvix/store/src/proto/tests/mod.rs @@ -0,0 +1,6 @@ +mod directory; +mod directory_nodes_iterator; +mod grpc_blobservice; +mod grpc_directoryservice; +mod grpc_pathinfoservice; +mod pathinfo; diff --git a/tvix/store/src/proto/tests/pathinfo.rs b/tvix/store/src/proto/tests/pathinfo.rs new file mode 100644 index 000000000000..54a76fc6c554 --- /dev/null +++ b/tvix/store/src/proto/tests/pathinfo.rs @@ -0,0 +1,207 @@ +use crate::proto::{self, Node, PathInfo, ValidatePathInfoError}; +use lazy_static::lazy_static; +use nix_compat::store_path::{self, StorePath}; +use test_case::test_case; + +lazy_static! 
{ + static ref DUMMY_DIGEST: Vec<u8> = vec![ + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, + ]; + static ref DUMMY_DIGEST_2: Vec<u8> = vec![ + 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, + ]; +} + +const DUMMY_NAME: &str = "00000000000000000000000000000000-dummy"; + +#[test_case( + None, + Err(ValidatePathInfoError::NoNodePresent()) ; + "No node" +)] +#[test_case( + Some(Node { node: None }), + Err(ValidatePathInfoError::NoNodePresent()); + "No node 2" +)] +fn validate_no_node( + t_node: Option<proto::Node>, + t_result: Result<StorePath, ValidatePathInfoError>, +) { + // construct the PathInfo object + let p = PathInfo { + node: t_node, + ..Default::default() + }; + assert_eq!(t_result, p.validate()); +} + +#[test_case( + proto::DirectoryNode { + name: DUMMY_NAME.to_string(), + digest: DUMMY_DIGEST.to_vec(), + size: 0, + }, + Ok(StorePath::from_string(DUMMY_NAME).expect("must succeed")); + "ok" +)] +#[test_case( + proto::DirectoryNode { + name: DUMMY_NAME.to_string(), + digest: vec![], + size: 0, + }, + Err(ValidatePathInfoError::InvalidDigestLen(0)); + "invalid digest length" +)] +#[test_case( + proto::DirectoryNode { + name: "invalid".to_string(), + digest: DUMMY_DIGEST.to_vec(), + size: 0, + }, + Err(ValidatePathInfoError::InvalidNodeName( + "invalid".to_string(), + store_path::Error::InvalidName(store_path::NameError::InvalidName("".to_string())) + )); + "invalid node name" +)] +fn validate_directory( + t_directory_node: proto::DirectoryNode, + t_result: Result<StorePath, ValidatePathInfoError>, +) { + // construct the PathInfo object + let p = PathInfo { + node: Some(Node { + node: Some(proto::node::Node::Directory(t_directory_node)), + }), + ..Default::default() + }; + assert_eq!(t_result, p.validate()); +} + +#[test_case( + proto::FileNode { + name: DUMMY_NAME.to_string(), + digest: DUMMY_DIGEST.to_vec(), + size: 0, + executable: false, + }, + Ok(StorePath::from_string(DUMMY_NAME).expect("must succeed")); + "ok" +)] +#[test_case( + proto::FileNode { + name: DUMMY_NAME.to_string(), + digest: vec![], + ..Default::default() + }, + Err(ValidatePathInfoError::InvalidDigestLen(0)); + "invalid digest length" +)] +#[test_case( + proto::FileNode { + name: "invalid".to_string(), + digest: DUMMY_DIGEST.to_vec(), + ..Default::default() + }, + Err(ValidatePathInfoError::InvalidNodeName( + "invalid".to_string(), + store_path::Error::InvalidName(store_path::NameError::InvalidName("".to_string())) + )); + "invalid node name" +)] +fn validate_file(t_file_node: proto::FileNode, t_result: Result<StorePath, ValidatePathInfoError>) { + // construct the PathInfo object + let p = PathInfo { + node: Some(Node { + node: Some(proto::node::Node::File(t_file_node)), + }), + ..Default::default() + }; + assert_eq!(t_result, p.validate()); +} + +#[test_case( + proto::SymlinkNode { + name: DUMMY_NAME.to_string(), + ..Default::default() + }, + Ok(StorePath::from_string(DUMMY_NAME).expect("must succeed")); + "ok" +)] +#[test_case( + proto::SymlinkNode { + name: "invalid".to_string(), + ..Default::default() + }, + Err(ValidatePathInfoError::InvalidNodeName( + "invalid".to_string(), + store_path::Error::InvalidName(store_path::NameError::InvalidName("".to_string())) + )); + "invalid node name" +)] +fn validate_symlink( 
+ t_symlink_node: proto::SymlinkNode, + t_result: Result<StorePath, ValidatePathInfoError>, +) { + // construct the PathInfo object + let p = PathInfo { + node: Some(Node { + node: Some(proto::node::Node::Symlink(t_symlink_node)), + }), + ..Default::default() + }; + assert_eq!(t_result, p.validate()); +} + +#[test] +fn validate_references() { + // create a PathInfo without narinfo field. + let path_info = PathInfo { + node: Some(Node { + node: Some(proto::node::Node::Directory(proto::DirectoryNode { + name: DUMMY_NAME.to_string(), + digest: DUMMY_DIGEST.to_vec(), + size: 0, + })), + }), + references: vec![DUMMY_DIGEST_2.to_vec()], + narinfo: None, + }; + assert!(path_info.validate().is_ok()); + + // create a PathInfo with a narinfo field, but an inconsistent set of references + let path_info_with_narinfo_missing_refs = PathInfo { + narinfo: Some(proto::NarInfo { + nar_size: 0, + nar_sha256: DUMMY_DIGEST.to_vec(), + signatures: vec![], + reference_names: vec![], + }), + ..path_info.clone() + }; + match path_info_with_narinfo_missing_refs + .validate() + .expect_err("must_fail") + { + ValidatePathInfoError::InconsistentNumberOfReferences(_, _) => {} + _ => panic!("unexpected error"), + }; + + // create a pathinfo with the correct number of references, should suceed + let path_info_with_narinfo = PathInfo { + narinfo: Some(proto::NarInfo { + nar_size: 0, + nar_sha256: DUMMY_DIGEST.to_vec(), + signatures: vec![], + reference_names: vec![format!("/nix/store/{}", DUMMY_NAME)], + }), + ..path_info + }; + assert!(path_info_with_narinfo.validate().is_ok()); +} diff --git a/tvix/store/src/store_io.rs b/tvix/store/src/store_io.rs new file mode 100644 index 000000000000..fb46204e505f --- /dev/null +++ b/tvix/store/src/store_io.rs @@ -0,0 +1,358 @@ +//! This module provides an implementation of EvalIO. +//! +//! It can be used by the tvix evalutator to talk to a tvix store. + +use data_encoding::BASE64; +use nix_compat::{ + nixhash::{HashAlgo, NixHash, NixHashWithMode}, + store_path::{build_regular_ca_path, StorePath}, +}; +use smol_str::SmolStr; +use std::{io, path::Path, path::PathBuf}; +use tracing::{error, instrument, warn}; +use tvix_eval::{EvalIO, FileType, StdIO}; + +use crate::{ + blobservice::BlobService, + directoryservice::{self, DirectoryService}, + import, + nar::NARCalculationService, + pathinfoservice::PathInfoService, + proto::NamedNode, + B3Digest, +}; + +/// Implements [EvalIO], asking given [PathInfoService], [DirectoryService] +/// and [BlobService]. +/// +/// In case the given path does not exist in these stores, we ask StdIO. +/// This is to both cover cases of syntactically valid store paths, that exist +/// on the filesystem (still managed by Nix), as well as being able to read +/// files outside store paths. 
+pub struct TvixStoreIO< + BS: BlobService, + DS: DirectoryService, + PS: PathInfoService, + NCS: NARCalculationService, +> { + blob_service: BS, + directory_service: DS, + path_info_service: PS, + nar_calculation_service: NCS, + std_io: StdIO, +} + +impl<BS: BlobService, DS: DirectoryService, PS: PathInfoService, NCS: NARCalculationService> + TvixStoreIO<BS, DS, PS, NCS> +{ + pub fn new( + blob_service: BS, + directory_service: DS, + path_info_service: PS, + nar_calculation_service: NCS, + ) -> Self { + Self { + blob_service, + directory_service, + path_info_service, + nar_calculation_service, + std_io: StdIO {}, + } + } + + /// for a given [StorePath] and additional [Path] inside the store path, + /// look up the [PathInfo], and if it exists, traverse the directory structure to + /// return the [crate::proto::node::Node] specified by `sub_path`. + #[instrument(skip(self), ret, err)] + fn store_path_to_root_node( + &self, + store_path: &StorePath, + sub_path: &Path, + ) -> Result<Option<crate::proto::node::Node>, crate::Error> { + let path_info = { + match self.path_info_service.get(store_path.digest)? { + // If there's no PathInfo found, early exit + None => return Ok(None), + Some(path_info) => path_info, + } + }; + + let root_node = { + match path_info.node { + None => { + warn!( + "returned PathInfo {:?} node is None, this shouldn't happen.", + &path_info + ); + return Ok(None); + } + Some(root_node) => match root_node.node { + None => { + warn!("node for {:?} is None, this shouldn't happen.", &root_node); + return Ok(None); + } + Some(root_node) => root_node, + }, + } + }; + + directoryservice::traverse_to(&self.directory_service, root_node, sub_path) + } + + /// Imports a given path on the filesystem into the store, and returns the + /// [crate::proto::PathInfo] describing the path, that was sent to + /// [PathInfoService]. + /// While not part of the [EvalIO], it's still useful for clients who + /// care about the [PathInfo]. + #[instrument(skip(self), ret, err)] + pub fn import_path_with_pathinfo( + &self, + path: &std::path::Path, + ) -> Result<crate::proto::PathInfo, io::Error> { + // Call [import::ingest_path], which will walk over the given path and return a root_node. + let root_node = import::ingest_path(&self.blob_service, &self.directory_service, path) + .expect("error during import_path"); + + // Render the NAR + let (nar_size, nar_sha256) = self + .nar_calculation_service + .calculate_nar(&root_node) + .expect("error during nar calculation"); // TODO: handle error + + // For given NAR sha256 digest and name, return the new [StorePath] this would have. + let nar_hash_with_mode = + NixHashWithMode::Recursive(NixHash::new(HashAlgo::Sha256, nar_sha256.to_vec())); + + let name = path + .file_name() + .expect("path must not be ..") + .to_str() + .expect("path must be valid unicode"); + + let output_path = + build_regular_ca_path(name, &nar_hash_with_mode, Vec::<String>::new(), false).unwrap(); + + // assemble a new root_node with a name that is derived from the nar hash. 
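// (The rename below is needed because, for this content-addressed ingestion,
// the final store path name is only known after the NAR has been hashed; the
// root node returned by ingest_path still carries the original on-disk name.)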
+ let renamed_root_node = { + let name = output_path.to_string(); + + match root_node { + crate::proto::node::Node::Directory(n) => { + crate::proto::node::Node::Directory(crate::proto::DirectoryNode { name, ..n }) + } + crate::proto::node::Node::File(n) => { + crate::proto::node::Node::File(crate::proto::FileNode { name, ..n }) + } + crate::proto::node::Node::Symlink(n) => { + crate::proto::node::Node::Symlink(crate::proto::SymlinkNode { name, ..n }) + } + } + }; + + // assemble the [crate::proto::PathInfo] object. + let path_info = crate::proto::PathInfo { + node: Some(crate::proto::Node { + node: Some(renamed_root_node), + }), + // There's no reference scanning on path contents ingested like this. + references: vec![], + narinfo: Some(crate::proto::NarInfo { + nar_size, + nar_sha256: nar_sha256.to_vec(), + signatures: vec![], + reference_names: vec![], + // TODO: narinfo for talosctl.src contains `CA: fixed:r:sha256:1x13j5hy75221bf6kz7cpgld9vgic6bqx07w5xjs4pxnksj6lxb6` + // do we need this anywhere? + }), + }; + + // put into [PathInfoService], and return the PathInfo that we get back + // from there (it might contain additional signatures). + let path_info = self.path_info_service.put(path_info)?; + + Ok(path_info) + } +} + +/// For given NAR sha256 digest and name, return the new [StorePath] this would have. +#[instrument(skip(nar_sha256_digest), ret, fields(nar_sha256_digest=BASE64.encode(nar_sha256_digest)))] +fn calculate_nar_based_store_path(nar_sha256_digest: &[u8; 32], name: &str) -> StorePath { + let nar_hash_with_mode = + NixHashWithMode::Recursive(NixHash::new(HashAlgo::Sha256, nar_sha256_digest.to_vec())); + + build_regular_ca_path(name, &nar_hash_with_mode, Vec::<String>::new(), false).unwrap() +} + +impl<BS: BlobService, DS: DirectoryService, PS: PathInfoService, NCS: NARCalculationService> EvalIO + for TvixStoreIO<BS, DS, PS, NCS> +{ + #[instrument(skip(self), ret, err)] + fn path_exists(&self, path: &Path) -> Result<bool, io::Error> { + if let Ok((store_path, sub_path)) = + StorePath::from_absolute_path_full(&path.to_string_lossy()) + { + if self + .store_path_to_root_node(&store_path, &sub_path)? + .is_some() + { + Ok(true) + } else { + // As tvix-store doesn't manage /nix/store on the filesystem, + // we still need to also ask self.std_io here. + self.std_io.path_exists(path) + } + } else { + // The store path is no store path, so do regular StdIO. + self.std_io.path_exists(path) + } + } + + #[instrument(skip(self), ret, err)] + fn read_to_string(&self, path: &Path) -> Result<String, io::Error> { + if let Ok((store_path, sub_path)) = + StorePath::from_absolute_path_full(&path.to_string_lossy()) + { + if let Some(node) = self.store_path_to_root_node(&store_path, &sub_path)? 
{ + // depending on the node type, treat read_to_string differently + match node { + crate::proto::node::Node::Directory(_) => { + // This would normally be a io::ErrorKind::IsADirectory (still unstable) + Err(io::Error::new( + io::ErrorKind::Unsupported, + "tried to read directory at {path} to string", + )) + } + crate::proto::node::Node::File(file_node) => { + let digest = + B3Digest::from_vec(file_node.digest.clone()).map_err(|_e| { + error!( + file_node = ?file_node, + "invalid digest" + ); + io::Error::new( + io::ErrorKind::InvalidData, + format!("invalid digest length in file node: {:?}", file_node), + ) + })?; + + let reader = { + let resp = self.blob_service.open_read(&digest)?; + match resp { + Some(blob_reader) => blob_reader, + None => { + error!( + blob.digest = %digest, + "blob not found", + ); + Err(io::Error::new( + io::ErrorKind::NotFound, + format!("blob {} not found", &digest), + ))? + } + } + }; + + io::read_to_string(reader) + } + crate::proto::node::Node::Symlink(_symlink_node) => Err(io::Error::new( + io::ErrorKind::Unsupported, + "read_to_string for symlinks is unsupported", + ))?, + } + } else { + // As tvix-store doesn't manage /nix/store on the filesystem, + // we still need to also ask self.std_io here. + self.std_io.read_to_string(path) + } + } else { + // The store path is no store path, so do regular StdIO. + self.std_io.read_to_string(path) + } + } + + #[instrument(skip(self), ret, err)] + fn read_dir(&self, path: &Path) -> Result<Vec<(SmolStr, FileType)>, io::Error> { + if let Ok((store_path, sub_path)) = + StorePath::from_absolute_path_full(&path.to_string_lossy()) + { + if let Some(node) = self.store_path_to_root_node(&store_path, &sub_path)? { + match node { + crate::proto::node::Node::Directory(directory_node) => { + // fetch the Directory itself. + let digest = + B3Digest::from_vec(directory_node.digest.clone()).map_err(|_e| { + io::Error::new( + io::ErrorKind::InvalidData, + format!( + "invalid digest length in directory node: {:?}", + directory_node + ), + ) + })?; + + if let Some(directory) = self.directory_service.get(&digest)? { + let mut children: Vec<(SmolStr, FileType)> = Vec::new(); + for node in directory.nodes() { + children.push(match node { + crate::proto::node::Node::Directory(e) => { + (e.name.into(), FileType::Directory) + } + crate::proto::node::Node::File(e) => { + (e.name.into(), FileType::Regular) + } + crate::proto::node::Node::Symlink(e) => { + (e.name.into(), FileType::Symlink) + } + }) + } + Ok(children) + } else { + // If we didn't get the directory node that's linked, that's a store inconsistency! + error!( + directory.digest = %digest, + path = ?path, + "directory not found", + ); + Err(io::Error::new( + io::ErrorKind::NotFound, + format!("directory {digest} does not exist"), + ))? + } + } + crate::proto::node::Node::File(_file_node) => { + // This would normally be a io::ErrorKind::NotADirectory (still unstable) + Err(io::Error::new( + io::ErrorKind::Unsupported, + "tried to readdir path {:?}, which is a file", + ))? + } + crate::proto::node::Node::Symlink(_symlink_node) => Err(io::Error::new( + io::ErrorKind::Unsupported, + "read_dir for symlinks is unsupported", + ))?, + } + } else { + self.std_io.read_dir(path) + } + } else { + self.std_io.read_dir(path) + } + } + + #[instrument(skip(self), ret, err)] + fn import_path(&self, path: &std::path::Path) -> Result<PathBuf, std::io::Error> { + let path_info = self.import_path_with_pathinfo(path)?; + + // from the [PathInfo], extract the store path (as string). 
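// Illustrative sketch: exercising the EvalIO surface implemented here from
// generic code. The "./some-file" path is just a placeholder.
fn demo_eval_io<T: EvalIO>(io: &T) -> io::Result<()> {
    // Importing a local path yields its (content-addressed) store path...
    let store_path = io.import_path(Path::new("./some-file"))?;
    // ...which is then visible through the same EvalIO implementation.
    assert!(io.path_exists(&store_path)?);
    let _contents = io.read_to_string(&store_path)?;
    Ok(())
}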
+ let mut path = PathBuf::from(nix_compat::store_path::STORE_DIR_WITH_SLASH); + path.push(path_info.node.unwrap().node.unwrap().get_name()); + + // and return it + Ok(path) + } + + #[instrument(skip(self), ret)] + fn store_dir(&self) -> Option<String> { + Some("/nix/store".to_string()) + } +} diff --git a/tvix/store/src/tests/fixtures.rs b/tvix/store/src/tests/fixtures.rs new file mode 100644 index 000000000000..934d9e4c5302 --- /dev/null +++ b/tvix/store/src/tests/fixtures.rs @@ -0,0 +1,175 @@ +use crate::{ + proto::{self, Directory, DirectoryNode, FileNode, SymlinkNode}, + B3Digest, +}; +use lazy_static::lazy_static; + +pub const HELLOWORLD_BLOB_CONTENTS: &[u8] = b"Hello World!"; +pub const EMPTY_BLOB_CONTENTS: &[u8] = b""; + +lazy_static! { + pub static ref DUMMY_DIGEST: Vec<u8> = vec![ + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, + ]; + pub static ref DUMMY_DATA_1: Vec<u8> = vec![0x01, 0x02, 0x03]; + pub static ref DUMMY_DATA_2: Vec<u8> = vec![0x04, 0x05]; + + pub static ref HELLOWORLD_BLOB_DIGEST: B3Digest = + blake3::hash(HELLOWORLD_BLOB_CONTENTS).as_bytes().into(); + pub static ref EMPTY_BLOB_DIGEST: B3Digest = + blake3::hash(EMPTY_BLOB_CONTENTS).as_bytes().into(); + + // 2 bytes + pub static ref BLOB_A: Vec<u8> = vec![0x00, 0x01]; + pub static ref BLOB_A_DIGEST: B3Digest = blake3::hash(&BLOB_A).as_bytes().into(); + + // 1MB + pub static ref BLOB_B: Vec<u8> = (0..255).collect::<Vec<u8>>().repeat(4 * 1024); + pub static ref BLOB_B_DIGEST: B3Digest = blake3::hash(&BLOB_B).as_bytes().into(); + + // Directories + pub static ref DIRECTORY_WITH_KEEP: proto::Directory = proto::Directory { + directories: vec![], + files: vec![FileNode { + name: ".keep".to_string(), + digest: EMPTY_BLOB_DIGEST.to_vec(), + size: 0, + executable: false, + }], + symlinks: vec![], + }; + pub static ref DIRECTORY_COMPLICATED: proto::Directory = proto::Directory { + directories: vec![DirectoryNode { + name: "keep".to_string(), + digest: DIRECTORY_WITH_KEEP.digest().to_vec(), + size: DIRECTORY_WITH_KEEP.size(), + }], + files: vec![FileNode { + name: ".keep".to_string(), + digest: EMPTY_BLOB_DIGEST.to_vec(), + size: 0, + executable: false, + }], + symlinks: vec![SymlinkNode { + name: "aa".to_string(), + target: "/nix/store/somewhereelse".to_string(), + }], + }; + pub static ref DIRECTORY_A: Directory = Directory::default(); + pub static ref DIRECTORY_B: Directory = Directory { + directories: vec![DirectoryNode { + name: "a".to_string(), + digest: DIRECTORY_A.digest().to_vec(), + size: DIRECTORY_A.size(), + }], + ..Default::default() + }; + pub static ref DIRECTORY_C: Directory = Directory { + directories: vec![ + DirectoryNode { + name: "a".to_string(), + digest: DIRECTORY_A.digest().to_vec(), + size: DIRECTORY_A.size(), + }, + DirectoryNode { + name: "a'".to_string(), + digest: DIRECTORY_A.digest().to_vec(), + size: DIRECTORY_A.size(), + } + ], + ..Default::default() + }; + + // output hash + pub static ref DUMMY_OUTPUT_HASH: Vec<u8> = vec![ + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00 + ]; + + /// The NAR representation of a symlink pointing to `/nix/store/somewhereelse` + pub static ref NAR_CONTENTS_SYMLINK: Vec<u8> = vec![ + 13, 0, 0, 0, 0, 0, 0, 0, b'n', b'i', b'x', b'-', b'a', b'r', b'c', b'h', b'i', b'v', b'e', b'-', b'1', 0, + 0, 0, // "nix-archive-1" + 1, 0, 0, 0, 0, 0, 0, 0, 
b'(', 0, 0, 0, 0, 0, 0, 0, // "(" + 4, 0, 0, 0, 0, 0, 0, 0, b't', b'y', b'p', b'e', 0, 0, 0, 0, // "type" + 7, 0, 0, 0, 0, 0, 0, 0, b's', b'y', b'm', b'l', b'i', b'n', b'k', 0, // "symlink" + 6, 0, 0, 0, 0, 0, 0, 0, b't', b'a', b'r', b'g', b'e', b't', 0, 0, // target + 24, 0, 0, 0, 0, 0, 0, 0, b'/', b'n', b'i', b'x', b'/', b's', b't', b'o', b'r', b'e', b'/', b's', b'o', + b'm', b'e', b'w', b'h', b'e', b'r', b'e', b'e', b'l', b's', + b'e', // "/nix/store/somewhereelse" + 1, 0, 0, 0, 0, 0, 0, 0, b')', 0, 0, 0, 0, 0, 0, 0 // ")" + ]; + + /// The NAR representation of a regular file with the contents "Hello World!" + pub static ref NAR_CONTENTS_HELLOWORLD: Vec<u8> = vec![ + 13, 0, 0, 0, 0, 0, 0, 0, b'n', b'i', b'x', b'-', b'a', b'r', b'c', b'h', b'i', b'v', b'e', b'-', b'1', 0, + 0, 0, // "nix-archive-1" + 1, 0, 0, 0, 0, 0, 0, 0, b'(', 0, 0, 0, 0, 0, 0, 0, // "(" + 4, 0, 0, 0, 0, 0, 0, 0, b't', b'y', b'p', b'e', 0, 0, 0, 0, // "type" + 7, 0, 0, 0, 0, 0, 0, 0, b'r', b'e', b'g', b'u', b'l', b'a', b'r', 0, // "regular" + 8, 0, 0, 0, 0, 0, 0, 0, b'c', b'o', b'n', b't', b'e', b'n', b't', b's', // "contents" + 12, 0, 0, 0, 0, 0, 0, 0, b'H', b'e', b'l', b'l', b'o', b' ', b'W', b'o', b'r', b'l', b'd', b'!', 0, 0, + 0, 0, // "Hello World!" + 1, 0, 0, 0, 0, 0, 0, 0, b')', 0, 0, 0, 0, 0, 0, 0 // ")" + ]; + + /// The NAR representation of a more complicated directory structure. + pub static ref NAR_CONTENTS_COMPLICATED: Vec<u8> = vec![ + 13, 0, 0, 0, 0, 0, 0, 0, b'n', b'i', b'x', b'-', b'a', b'r', b'c', b'h', b'i', b'v', b'e', b'-', b'1', 0, + 0, 0, // "nix-archive-1" + 1, 0, 0, 0, 0, 0, 0, 0, b'(', 0, 0, 0, 0, 0, 0, 0, // "(" + 4, 0, 0, 0, 0, 0, 0, 0, b't', b'y', b'p', b'e', 0, 0, 0, 0, // "type" + 9, 0, 0, 0, 0, 0, 0, 0, b'd', b'i', b'r', b'e', b'c', b't', b'o', b'r', b'y', 0, 0, 0, 0, 0, 0, 0, // "directory" + 5, 0, 0, 0, 0, 0, 0, 0, b'e', b'n', b't', b'r', b'y', 0, 0, 0, // "entry" + 1, 0, 0, 0, 0, 0, 0, 0, b'(', 0, 0, 0, 0, 0, 0, 0, // "(" + 4, 0, 0, 0, 0, 0, 0, 0, b'n', b'a', b'm', b'e', 0, 0, 0, 0, // "name" + 5, 0, 0, 0, 0, 0, 0, 0, b'.', b'k', b'e', b'e', b'p', 0, 0, 0, // ".keep" + 4, 0, 0, 0, 0, 0, 0, 0, b'n', b'o', b'd', b'e', 0, 0, 0, 0, // "node" + 1, 0, 0, 0, 0, 0, 0, 0, b'(', 0, 0, 0, 0, 0, 0, 0, // "(" + 4, 0, 0, 0, 0, 0, 0, 0, b't', b'y', b'p', b'e', 0, 0, 0, 0, // "type" + 7, 0, 0, 0, 0, 0, 0, 0, b'r', b'e', b'g', b'u', b'l', b'a', b'r', 0, // "regular" + 8, 0, 0, 0, 0, 0, 0, 0, b'c', b'o', b'n', b't', b'e', b'n', b't', b's', // "contents" + 0, 0, 0, 0, 0, 0, 0, 0, // "" + 1, 0, 0, 0, 0, 0, 0, 0, b')', 0, 0, 0, 0, 0, 0, 0, // ")" + 1, 0, 0, 0, 0, 0, 0, 0, b')', 0, 0, 0, 0, 0, 0, 0, // ")" + 5, 0, 0, 0, 0, 0, 0, 0, b'e', b'n', b't', b'r', b'y', 0, 0, 0, // "entry" + 1, 0, 0, 0, 0, 0, 0, 0, b'(', 0, 0, 0, 0, 0, 0, 0, // "(" + 4, 0, 0, 0, 0, 0, 0, 0, b'n', b'a', b'm', b'e', 0, 0, 0, 0, // "name" + 2, 0, 0, 0, 0, 0, 0, 0, b'a', b'a', 0, 0, 0, 0, 0, 0, // "aa" + 4, 0, 0, 0, 0, 0, 0, 0, b'n', b'o', b'd', b'e', 0, 0, 0, 0, // "node" + 1, 0, 0, 0, 0, 0, 0, 0, b'(', 0, 0, 0, 0, 0, 0, 0, // "(" + 4, 0, 0, 0, 0, 0, 0, 0, b't', b'y', b'p', b'e', 0, 0, 0, 0, // "type" + 7, 0, 0, 0, 0, 0, 0, 0, b's', b'y', b'm', b'l', b'i', b'n', b'k', 0, // "symlink" + 6, 0, 0, 0, 0, 0, 0, 0, b't', b'a', b'r', b'g', b'e', b't', 0, 0, // target + 24, 0, 0, 0, 0, 0, 0, 0, b'/', b'n', b'i', b'x', b'/', b's', b't', b'o', b'r', b'e', b'/', b's', b'o', + b'm', b'e', b'w', b'h', b'e', b'r', b'e', b'e', b'l', b's', + b'e', // "/nix/store/somewhereelse" + 1, 0, 0, 0, 0, 0, 0, 0, b')', 0, 0, 0, 0, 0, 0, 0, // ")" + 1, 0, 0, 
0, 0, 0, 0, 0, b')', 0, 0, 0, 0, 0, 0, 0, // ")" + 5, 0, 0, 0, 0, 0, 0, 0, b'e', b'n', b't', b'r', b'y', 0, 0, 0, // "entry" + 1, 0, 0, 0, 0, 0, 0, 0, b'(', 0, 0, 0, 0, 0, 0, 0, // "(" + 4, 0, 0, 0, 0, 0, 0, 0, b'n', b'a', b'm', b'e', 0, 0, 0, 0, // "name" + 4, 0, 0, 0, 0, 0, 0, 0, b'k', b'e', b'e', b'p', 0, 0, 0, 0, // "keep" + 4, 0, 0, 0, 0, 0, 0, 0, b'n', b'o', b'd', b'e', 0, 0, 0, 0, // "node" + 1, 0, 0, 0, 0, 0, 0, 0, b'(', 0, 0, 0, 0, 0, 0, 0, // "(" + 4, 0, 0, 0, 0, 0, 0, 0, b't', b'y', b'p', b'e', 0, 0, 0, 0, // "type" + 9, 0, 0, 0, 0, 0, 0, 0, b'd', b'i', b'r', b'e', b'c', b't', b'o', b'r', b'y', 0, 0, 0, 0, 0, 0, 0, // "directory" + 5, 0, 0, 0, 0, 0, 0, 0, b'e', b'n', b't', b'r', b'y', 0, 0, 0, // "entry" + 1, 0, 0, 0, 0, 0, 0, 0, b'(', 0, 0, 0, 0, 0, 0, 0, // "(" + 4, 0, 0, 0, 0, 0, 0, 0, b'n', b'a', b'm', b'e', 0, 0, 0, 0, // "name" + 5, 0, 0, 0, 0, 0, 0, 0, 46, 107, 101, 101, 112, 0, 0, 0, // ".keep" + 4, 0, 0, 0, 0, 0, 0, 0, 110, 111, 100, 101, 0, 0, 0, 0, // "node" + 1, 0, 0, 0, 0, 0, 0, 0, b'(', 0, 0, 0, 0, 0, 0, 0, // "(" + 4, 0, 0, 0, 0, 0, 0, 0, b't', b'y', b'p', b'e', 0, 0, 0, 0, // "type" + 7, 0, 0, 0, 0, 0, 0, 0, b'r', b'e', b'g', b'u', b'l', b'a', b'r', 0, // "regular" + 8, 0, 0, 0, 0, 0, 0, 0, b'c', b'o', b'n', b't', b'e', b'n', b't', b's', // "contents" + 0, 0, 0, 0, 0, 0, 0, 0, // "" + 1, 0, 0, 0, 0, 0, 0, 0, b')', 0, 0, 0, 0, 0, 0, 0, // ")" + 1, 0, 0, 0, 0, 0, 0, 0, b')', 0, 0, 0, 0, 0, 0, 0, // ")" + 1, 0, 0, 0, 0, 0, 0, 0, b')', 0, 0, 0, 0, 0, 0, 0, // ")" + 1, 0, 0, 0, 0, 0, 0, 0, b')', 0, 0, 0, 0, 0, 0, 0, // ")" + 1, 0, 0, 0, 0, 0, 0, 0, b')', 0, 0, 0, 0, 0, 0, 0, // ")" + ]; +} diff --git a/tvix/store/src/tests/import.rs b/tvix/store/src/tests/import.rs new file mode 100644 index 000000000000..8b66cb024bf0 --- /dev/null +++ b/tvix/store/src/tests/import.rs @@ -0,0 +1,113 @@ +use super::utils::{gen_blob_service, gen_directory_service}; +use crate::blobservice::BlobService; +use crate::directoryservice::DirectoryService; +use crate::import::ingest_path; +use crate::proto; +use crate::tests::fixtures::DIRECTORY_COMPLICATED; +use crate::tests::fixtures::*; +use tempfile::TempDir; + +#[cfg(target_family = "unix")] +#[test] +fn symlink() { + let tmpdir = TempDir::new().unwrap(); + + std::fs::create_dir_all(&tmpdir).unwrap(); + std::os::unix::fs::symlink( + "/nix/store/somewhereelse", + tmpdir.path().join("doesntmatter"), + ) + .unwrap(); + + let root_node = ingest_path( + &mut gen_blob_service(), + &mut gen_directory_service(), + tmpdir.path().join("doesntmatter"), + ) + .expect("must succeed"); + + assert_eq!( + crate::proto::node::Node::Symlink(proto::SymlinkNode { + name: "doesntmatter".to_string(), + target: "/nix/store/somewhereelse".to_string(), + }), + root_node, + ) +} + +#[test] +fn single_file() { + let tmpdir = TempDir::new().unwrap(); + + std::fs::write(tmpdir.path().join("root"), HELLOWORLD_BLOB_CONTENTS).unwrap(); + + let mut blob_service = gen_blob_service(); + + let root_node = ingest_path( + &mut blob_service, + &mut gen_directory_service(), + tmpdir.path().join("root"), + ) + .expect("must succeed"); + + assert_eq!( + crate::proto::node::Node::File(proto::FileNode { + name: "root".to_string(), + digest: HELLOWORLD_BLOB_DIGEST.to_vec(), + size: HELLOWORLD_BLOB_CONTENTS.len() as u32, + executable: false, + }), + root_node, + ); + + // ensure the blob has been uploaded + assert!(blob_service.has(&HELLOWORLD_BLOB_DIGEST).unwrap()); +} + +#[test] +fn complicated() { + let tmpdir = TempDir::new().unwrap(); + + // File ``.keep` + 
std::fs::write(tmpdir.path().join(".keep"), vec![]).unwrap(); + // Symlink `aa` + std::os::unix::fs::symlink("/nix/store/somewhereelse", tmpdir.path().join("aa")).unwrap(); + // Directory `keep` + std::fs::create_dir(tmpdir.path().join("keep")).unwrap(); + // File `keep/.keep` + std::fs::write(tmpdir.path().join("keep").join(".keep"), vec![]).unwrap(); + + let mut blob_service = gen_blob_service(); + let mut directory_service = gen_directory_service(); + + let root_node = ingest_path(&mut blob_service, &mut directory_service, tmpdir.path()) + .expect("must succeed"); + + // ensure root_node matches expectations + assert_eq!( + crate::proto::node::Node::Directory(proto::DirectoryNode { + name: tmpdir + .path() + .file_name() + .unwrap() + .to_string_lossy() + .to_string(), + digest: DIRECTORY_COMPLICATED.digest().to_vec(), + size: DIRECTORY_COMPLICATED.size(), + }), + root_node, + ); + + // ensure DIRECTORY_WITH_KEEP and DIRECTORY_COMPLICATED have been uploaded + assert!(directory_service + .get(&DIRECTORY_WITH_KEEP.digest()) + .unwrap() + .is_some()); + assert!(directory_service + .get(&DIRECTORY_COMPLICATED.digest()) + .unwrap() + .is_some()); + + // ensure EMPTY_BLOB_CONTENTS has been uploaded + assert!(blob_service.has(&EMPTY_BLOB_DIGEST).unwrap()); +} diff --git a/tvix/store/src/tests/mod.rs b/tvix/store/src/tests/mod.rs new file mode 100644 index 000000000000..8ceea01e3190 --- /dev/null +++ b/tvix/store/src/tests/mod.rs @@ -0,0 +1,4 @@ +pub mod fixtures; +mod import; +mod nar_renderer; +pub mod utils; diff --git a/tvix/store/src/tests/nar_renderer.rs b/tvix/store/src/tests/nar_renderer.rs new file mode 100644 index 000000000000..f13107b1e48d --- /dev/null +++ b/tvix/store/src/tests/nar_renderer.rs @@ -0,0 +1,189 @@ +use crate::blobservice::BlobService; +use crate::blobservice::BlobWriter; +use crate::directoryservice::DirectoryService; +use crate::nar::NARRenderer; +use crate::proto::DirectoryNode; +use crate::proto::FileNode; +use crate::proto::SymlinkNode; +use crate::tests::fixtures::*; +use crate::tests::utils::*; +use std::io; + +#[test] +fn single_symlink() { + let renderer = NARRenderer::new(gen_blob_service(), gen_directory_service()); + // don't put anything in the stores, as we don't actually do any requests. + + let mut buf: Vec<u8> = vec![]; + + renderer + .write_nar( + &mut buf, + &crate::proto::node::Node::Symlink(SymlinkNode { + name: "doesntmatter".to_string(), + target: "/nix/store/somewhereelse".to_string(), + }), + ) + .expect("must succeed"); + + assert_eq!(buf, NAR_CONTENTS_SYMLINK.to_vec()); +} + +/// Make sure the NARRenderer fails with a NotFound error if the blob referenced +/// in the proto node is not present in the blob store. +#[test] +fn single_file_missing_blob() { + let renderer = NARRenderer::new(gen_blob_service(), gen_directory_service()); + let mut buf: Vec<u8> = vec![]; + + let e = renderer + .write_nar( + &mut buf, + &crate::proto::node::Node::File(FileNode { + name: "doesntmatter".to_string(), + digest: HELLOWORLD_BLOB_DIGEST.to_vec(), + size: HELLOWORLD_BLOB_CONTENTS.len() as u32, + executable: false, + }), + ) + .expect_err("must fail"); + + match e { + crate::nar::RenderError::NARWriterError(e) => { + assert_eq!(io::ErrorKind::NotFound, e.kind()); + } + _ => panic!("unexpected error: {:?}", e), + } +} + +/// Make sure the NARRenderer fails if the blob returned by the store has a different size +/// than specified in the proto node.
+#[test] +fn single_file_wrong_blob_size() { + let blob_service = gen_blob_service(); + + // insert blob into the store + let mut writer = blob_service.open_write().unwrap(); + io::copy( + &mut io::Cursor::new(HELLOWORLD_BLOB_CONTENTS.to_vec()), + &mut writer, + ) + .unwrap(); + assert_eq!(HELLOWORLD_BLOB_DIGEST.clone(), writer.close().unwrap()); + + let renderer = NARRenderer::new(blob_service, gen_directory_service()); + + // Test with a root FileNode of a too big size + { + let mut buf: Vec<u8> = vec![]; + let e = renderer + .write_nar( + &mut buf, + &crate::proto::node::Node::File(FileNode { + name: "doesntmatter".to_string(), + digest: HELLOWORLD_BLOB_DIGEST.to_vec(), + size: 42, // <- note the wrong size here! + executable: false, + }), + ) + .expect_err("must fail"); + + match e { + crate::nar::RenderError::NARWriterError(e) => { + assert_eq!(io::ErrorKind::UnexpectedEof, e.kind()); + } + _ => panic!("unexpected error: {:?}", e), + } + } + + // Test with a root FileNode of a too small size + { + let mut buf: Vec<u8> = vec![]; + let e = renderer + .write_nar( + &mut buf, + &crate::proto::node::Node::File(FileNode { + name: "doesntmatter".to_string(), + digest: HELLOWORLD_BLOB_DIGEST.to_vec(), + size: 2, // <- note the wrong size here! + executable: false, + }), + ) + .expect_err("must fail"); + + match e { + crate::nar::RenderError::NARWriterError(e) => { + assert_eq!(io::ErrorKind::InvalidInput, e.kind()); + } + _ => panic!("unexpected error: {:?}", e), + } + } +} + +#[test] +fn single_file() { + let blob_service = gen_blob_service(); + + // insert blob into the store + let mut writer = blob_service.open_write().unwrap(); + io::copy( + &mut io::Cursor::new(HELLOWORLD_BLOB_CONTENTS.to_vec()), + &mut writer, + ) + .unwrap(); + assert_eq!(HELLOWORLD_BLOB_DIGEST.clone(), writer.close().unwrap()); + + let renderer = NARRenderer::new(blob_service, gen_directory_service()); + let mut buf: Vec<u8> = vec![]; + + renderer + .write_nar( + &mut buf, + &crate::proto::node::Node::File(FileNode { + name: "doesntmatter".to_string(), + digest: HELLOWORLD_BLOB_DIGEST.to_vec(), + size: HELLOWORLD_BLOB_CONTENTS.len() as u32, + executable: false, + }), + ) + .expect("must succeed"); + + assert_eq!(buf, NAR_CONTENTS_HELLOWORLD.to_vec()); +} + +#[test] +fn test_complicated() { + let blob_service = gen_blob_service(); + let directory_service = gen_directory_service(); + + // put all data into the stores. 
+ // insert blob into the store + let mut writer = blob_service.open_write().unwrap(); + io::copy( + &mut io::Cursor::new(EMPTY_BLOB_CONTENTS.to_vec()), + &mut writer, + ) + .unwrap(); + assert_eq!(EMPTY_BLOB_DIGEST.clone(), writer.close().unwrap()); + + directory_service.put(DIRECTORY_WITH_KEEP.clone()).unwrap(); + directory_service + .put(DIRECTORY_COMPLICATED.clone()) + .unwrap(); + + let renderer = NARRenderer::new(blob_service, directory_service); + let mut buf: Vec<u8> = vec![]; + + renderer + .write_nar( + &mut buf, + &crate::proto::node::Node::Directory(DirectoryNode { + name: "doesntmatter".to_string(), + digest: DIRECTORY_COMPLICATED.digest().to_vec(), + size: DIRECTORY_COMPLICATED.size(), + }), + ) + .expect("must succeed"); + + assert_eq!(buf, NAR_CONTENTS_COMPLICATED.to_vec()); +} diff --git a/tvix/store/src/tests/utils.rs b/tvix/store/src/tests/utils.rs new file mode 100644 index 000000000000..2991feed41db --- /dev/null +++ b/tvix/store/src/tests/utils.rs @@ -0,0 +1,17 @@ +use crate::{ + blobservice::{BlobService, MemoryBlobService}, + directoryservice::{DirectoryService, MemoryDirectoryService}, + pathinfoservice::{MemoryPathInfoService, PathInfoService}, +}; + +pub fn gen_blob_service() -> impl BlobService + Send + Sync + Clone + 'static { + MemoryBlobService::default() +} + +pub fn gen_directory_service() -> impl DirectoryService + Send + Sync + Clone + 'static { + MemoryDirectoryService::default() +} + +pub fn gen_pathinfo_service() -> impl PathInfoService { + MemoryPathInfoService::default() +} diff --git a/tvix/verify-lang-tests/default.nix b/tvix/verify-lang-tests/default.nix new file mode 100644 index 000000000000..2267ecb77d2e --- /dev/null +++ b/tvix/verify-lang-tests/default.nix @@ -0,0 +1,197 @@ +# SPDX-License-Identifier: LGPL-2.1-only +# SPDX-FileCopyrightText: © 2022 The TVL Contributors +# SPDX-FileCopyrightText: © 2004-2022 The Nix Contributors +# +# Execute language tests found in tvix_tests and nix_tests +# using the C++ Nix implementation. Based on NixOS/nix:tests/lang.sh. +{ depot, pkgs, lib, ... 
}: + +let + testRoot = ../eval/src/tests; + + inherit (pkgs.buildPackages) nix nix_latest; + + parseTest = dir: baseName: + let + tokens = builtins.match "(eval|parse)-(okay|fail).+\\.nix" baseName; + in + if tokens == null + then null + else { + type = builtins.elemAt tokens 0; + expectedSuccess = (builtins.elemAt tokens 1) == "okay"; + fileName = "${dir}/${baseName}"; + }; + + allLangTests = + lib.concatMap + ( + dir: + lib.pipe + (builtins.readDir (testRoot + "/${dir}")) + [ + builtins.attrNames + (builtins.map (parseTest dir)) + (builtins.filter (t: t != null)) + ] + ) [ "nix_tests" "nix_tests/notyetpassing" "tvix_tests" "tvix_tests/notyetpassing" ]; + + skippedLangTests = { + # TODO(sterni): set up NIX_PATH in sandbox + "eval-okay-search-path.nix" = true; + # Floating point precision differs between tvix and Nix + "eval-okay-fromjson.nix" = true; + # C++ Nix can't TCO + "eval-okay-tail-call-1.nix" = true; + # Ordering change after 2.3 + "eval-okay-xml.nix" = [ nix ]; + # Missing builtins in Nix 2.3 + "eval-okay-ceil.nix" = [ nix ]; + "eval-okay-floor-ceil.nix" = [ nix ]; + "eval-okay-floor.nix" = [ nix ]; + "eval-okay-groupBy.nix" = [ nix ]; + "eval-okay-zipAttrsWith.nix" = [ nix ]; + # Comparable lists are not in Nix 2.3 + "eval-okay-sort.nix" = [ nix ]; + "eval-okay-compare-lists.nix" = [ nix ]; + "eval-okay-value-pointer-compare.nix" = [ nix ]; + # getAttrPos gains support for functionArgs-returned sets after 2.3 + "eval-okay-getattrpos-functionargs.nix" = [ nix ]; + # groupBy appeared (long) after 2.3 + "eval-okay-builtins-groupby-thunk.nix" = [ nix ]; + }; + + runCppNixLangTests = cpp-nix: + let + testCommand = { fileName, type, expectedSuccess, ... }: + let + testBase = lib.removeSuffix ".nix" fileName; + expFile = + let + possibleFiles = + builtins.filter + (path: builtins.pathExists (testRoot + "/${path}")) + (builtins.map + (ext: "${testBase}.${ext}") + [ "exp" "exp.xml" ]); + in + if possibleFiles == [ ] then null else builtins.head possibleFiles; + outFile = "${testBase}.out"; + + # Skip if skippedLangTests prescribes it (possibly just for the current nix) + # or if we are missing an exp file for an eval-okay test. + skip = + let + doSkip = skippedLangTests.${builtins.baseNameOf fileName} or false; + in + if type == "eval" && expectedSuccess && (expFile == null) then true + else if builtins.isBool doSkip then doSkip + else builtins.any (drv: cpp-nix == drv) doSkip; + + flagsFile = "${testBase}.flags"; + + instantiateFlags = + lib.escapeShellArgs + ( + [ "--${type}" fileName ] + ++ lib.optionals (type == "eval") [ "--strict" ] + ++ lib.optionals (expFile != null && lib.hasSuffix "xml" expFile) + [ + "--no-location" + "--xml" + ] + ) + + lib.optionalString (builtins.pathExists (testRoot + "/${flagsFile}")) + " $(cat '${flagsFile}')"; + in + + if skip + then "echo \"SKIP ${type} ${fileName}\"\n" + else '' + thisTestPassed=true + + echo "RUN ${type} ${fileName} ${ + lib.optionalString (!expectedSuccess) "(expecting failure)" + }" + + if ! expect ${if expectedSuccess then "0" else "1"} \ + nix-instantiate ${instantiateFlags} \ + ${if expectedSuccess then "1" else "2"}> \ + ${if expFile != null then outFile else "/dev/null"}; + then + echo -n "FAIL" + thisTestPassed=false + fi + '' + lib.optionalString (expFile != null) '' + if ! 
diff --color=always -u '${outFile}' '${expFile}'; then + thisTestPassed=false + fi + '' + '' + if $thisTestPassed; then + echo -n "PASS" + else + echo -n "FAIL" + passed=false + fi + + echo " ${type} ${fileName}" + + unset thisTestPassed + ''; + in + + pkgs.stdenv.mkDerivation { + name = "cpp-${cpp-nix.name}-run-lang-tests"; + + nativeBuildInputs = [ cpp-nix ]; + + # Obtain tests via the unpackPhase + src = testRoot; + dontConfigure = true; + + # Environment expected by the test suite + TEST_VAR = "foo"; + + buildPhase = '' + # Make nix-instantiate happy in the sandbox + export NIX_STORE_DIR="$(realpath "$(mktemp -d store.XXXXXXXXXX)")" + export NIX_STATE_DIR="$(realpath "$(mktemp -d state.XXXXXXXXXX)")" + + # Helper function to check expected exit code + expect() { + local expected res + expected="$1" + shift + set +e + "$@" + res="$?" + set -e + [[ $res -eq $expected ]] + } + + # Track test results so far + passed=true + + source "$testCommandsPath" + ''; + + # Actually runs into the argv limit + passAsFile = [ "testCommands" ]; + testCommands = lib.concatMapStrings testCommand allLangTests; + + installPhase = '' + if $passed; then + touch $out + else + echo "Some test(s) failed!" + exit 1 + fi + ''; + }; + +in + +depot.nix.readTree.drvTargets { + "nix-2.3" = runCppNixLangTests nix; + "nix-${lib.versions.majorMinor nix_latest.version}" = runCppNixLangTests nix_latest; +} diff --git a/tvix/website/default.nix b/tvix/website/default.nix new file mode 100644 index 000000000000..2dd939e8a939 --- /dev/null +++ b/tvix/website/default.nix @@ -0,0 +1,32 @@ +{ depot, lib, pkgs, ... }: + +let + # https://developers.google.com/search/docs/advanced/structured-data/logo + structuredData = { + "@context" = "https://schema.org"; + "@type" = "Organisation"; + url = "https://tvl.fyi"; + logo = "https://static.tvl.fyi/latest/logo-animated.svg"; + }; + + # All Tvix-related blog posts from the main TVL website + tvixPosts = builtins.filter + (post: !(post.draft or false) && (lib.hasInfix "Tvix" post.title)) + depot.web.tvl.blog.posts; + + postListEntries = map (p: "* [${p.title}](https://tvl.fyi/blog/${p.key})") tvixPosts; + + landing = depot.web.tvl.template { + title = "Tvix - A new implementation of Nix"; + content = '' + ${builtins.readFile ./landing-en.md} + ${builtins.concatStringsSep "\n" postListEntries} + ''; + }; + +in +pkgs.runCommand "tvix-website" { } '' + mkdir $out + cp ${landing} $out/index.html + cp ${./tvix-logo.webp} $out/tvix-logo.webp +'' diff --git a/tvix/website/landing-en.md b/tvix/website/landing-en.md new file mode 100644 index 000000000000..d95320f11c21 --- /dev/null +++ b/tvix/website/landing-en.md @@ -0,0 +1,36 @@ +<img class="tvl-logo" src="./tvix-logo.webp" + alt="A candy bar in different shades of blue that says 'Tvix by TVL' on it"> + +------------------ + +Tvix is a new implementation of Nix, a purely-functional package manager. It +aims to have a modular implementation, in which different components can be +reused or replaced based on the use-case. + +Tvix is developed as a GPLv3-licensed open-source project by +[TVL][], with source code available in the [TVL monorepo][]. + +There are several projects within Tvix, such as: + +* `//tvix/eval` - an implementation of the Nix programming language +* `//tvix/serde` - a Rust library for using the Nix language for config files +* `//tvix/nix-compat` - a Rust library for compatibility with C++ Nix + features like encodings and hashing schemes +* ... and a handful others! 
+ +The language evaluator can be toyed with in [Tvixbolt][], and you can check out +the [Tvix README][] ([GitHub mirror][gh]) for additional information on the +project and development workflows. + +Developer documentation for some parts of Tvix is [available online][docs]. + +[TVL]: https://tvl.fyi +[TVL monorepo]: https://cs.tvl.fyi/depot/-/tree/tvix +[Tvixbolt]: https://tvixbolt.tvl.su +[Tvix README]: https://code.tvl.fyi/about/tvix +[gh]: https://github.com/tvlfyi/tvix/ +[docs]: https://docs.tvix.dev + +------------------- + +Check out the latest Tvix-related blog posts from TVL's website: diff --git a/tvix/website/tvix-logo.webp b/tvix/website/tvix-logo.webp new file mode 100644 index 000000000000..07bffc18b7b9 --- /dev/null +++ b/tvix/website/tvix-logo.webp Binary files differ