Diffstat (limited to 'tools')
83 files changed, 3749 insertions, 1793 deletions
diff --git a/tools/cheddar/.gitignore b/tools/cheddar/.gitignore new file mode 100644 index 000000000000..2f7896d1d136 --- /dev/null +++ b/tools/cheddar/.gitignore @@ -0,0 +1 @@ +target/ diff --git a/tools/cheddar/.skip-subtree b/tools/cheddar/.skip-subtree new file mode 100644 index 000000000000..e69de29bb2d1 --- /dev/null +++ b/tools/cheddar/.skip-subtree diff --git a/tools/cheddar/Cargo.lock b/tools/cheddar/Cargo.lock new file mode 100644 index 000000000000..0635209a1d0a --- /dev/null +++ b/tools/cheddar/Cargo.lock @@ -0,0 +1,1207 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. +version = 3 + +[[package]] +name = "adler" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" + +[[package]] +name = "adler32" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aae1277d39aeec15cb388266ecc24b11c80469deae6067e17a1a7aa9e5c1f234" + +[[package]] +name = "aho-corasick" +version = "0.7.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e37cfd5e7657ada45f742d6e99ca5788580b5c529dc78faf11ece6dc702656f" +dependencies = [ + "memchr", +] + +[[package]] +name = "ansi_term" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee49baf6cb617b853aa8d93bf420db2383fab46d314482ca2803b40d5fde979b" +dependencies = [ + "winapi", +] + +[[package]] +name = "arrayref" +version = "0.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4c527152e37cf757a3f78aae5a06fbeefdb07ccc535c980a3208ee3060dd544" + +[[package]] +name = "arrayvec" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "23b62fc65de8e4e7f52534fb52b0f3ed04746ae267519eef2a83941e8085068b" + +[[package]] +name = "ascii" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbf56136a5198c7b01a49e3afcbef6cf84597273d298f54432926024107b0109" + +[[package]] +name = "atty" +version = "0.2.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8" +dependencies = [ + "hermit-abi", + "libc", + "winapi", +] + +[[package]] +name = "autocfg" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cdb031dd78e28731d87d56cc8ffef4a8f36ca26c38fe2de700543e627f8a464a" + +[[package]] +name = "base64" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "904dfeac50f3cdaba28fc6f57fdcddb75f49ed61346676a78c4ffe55877802fd" + +[[package]] +name = "bincode" +version = "1.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b1f45e9417d87227c7a56d22e471c6206462cba514c7590c09aff4cf6d1ddcad" +dependencies = [ + "serde", +] + +[[package]] +name = "bitflags" +version = "1.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" + +[[package]] +name = "blake2b_simd" +version = "0.5.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "afa748e348ad3be8263be728124b24a24f268266f6f5d58af9d75f6a40b5c587" +dependencies = [ + "arrayref", + "arrayvec", + "constant_time_eq", +] + +[[package]] +name = "block-buffer" +version = "0.7.3" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "c0940dc441f31689269e10ac70eb1002a3a1d3ad1390e030043662eb7fe4688b" +dependencies = [ + "block-padding", + "byte-tools", + "byteorder", + "generic-array", +] + +[[package]] +name = "block-padding" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa79dedbb091f449f1f39e53edf88d5dbe95f895dae6135a8d7b881fb5af73f5" +dependencies = [ + "byte-tools", +] + +[[package]] +name = "brotli-sys" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4445dea95f4c2b41cde57cc9fee236ae4dbae88d8fcbdb4750fc1bb5d86aaecd" +dependencies = [ + "cc", + "libc", +] + +[[package]] +name = "brotli2" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0cb036c3eade309815c15ddbacec5b22c4d1f3983a774ab2eac2e3e9ea85568e" +dependencies = [ + "brotli-sys", + "libc", +] + +[[package]] +name = "buf_redux" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b953a6887648bb07a535631f2bc00fbdb2a2216f135552cb3f534ed136b9c07f" +dependencies = [ + "memchr", + "safemem", +] + +[[package]] +name = "byte-tools" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3b5ca7a04898ad4bcd41c90c5285445ff5b791899bb1b0abdd2a2aa791211d7" + +[[package]] +name = "byteorder" +version = "1.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610" + +[[package]] +name = "cc" +version = "1.0.71" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "79c2681d6594606957bbb8631c4b90a7fcaaa72cdb714743a437b156d6a7eedd" + +[[package]] +name = "cfg-if" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" + +[[package]] +name = "cheddar" +version = "0.2.0" +dependencies = [ + "clap", + "comrak", + "lazy_static", + "regex", + "rouille", + "serde", + "serde_json", + "syntect", +] + +[[package]] +name = "chrono" +version = "0.4.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "670ad68c9088c2a963aaa298cb369688cf3f9465ce5e2d4ca10e6e0098a1ce73" +dependencies = [ + "libc", + "num-integer", + "num-traits", + "time 0.1.43", + "winapi", +] + +[[package]] +name = "chunked_transfer" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fff857943da45f546682664a79488be82e69e43c1a7a2307679ab9afb3a66d2e" + +[[package]] +name = "clap" +version = "2.33.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37e58ac78573c40708d45522f0d80fa2f01cc4f9b4e2bf749807255454312002" +dependencies = [ + "ansi_term", + "atty", + "bitflags", + "strsim", + "textwrap", + "unicode-width", + "vec_map", +] + +[[package]] +name = "comrak" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b423acba50d5016684beaf643f9991e622633a4c858be6885653071c2da2b0c6" +dependencies = [ + "clap", + "entities", + "lazy_static", + "pest", + "pest_derive", + "regex", + "shell-words", + "twoway 0.2.2", + "typed-arena", + "unicode_categories", + "xdg", +] + +[[package]] +name = "constant_time_eq" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "245097e9a4535ee1e3e3931fcfcd55a796a44c643e8596ff6566d68f09b87bbc" 
+ +[[package]] +name = "crc32fast" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "81156fece84ab6a9f2afdb109ce3ae577e42b1228441eded99bd77f627953b1a" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "crossbeam-utils" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d82cfc11ce7f2c3faef78d8a684447b40d503d9681acebed6cb728d45940c4db" +dependencies = [ + "cfg-if", + "lazy_static", +] + +[[package]] +name = "deflate" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f95bf05dffba6e6cce8dfbb30def788154949ccd9aed761b472119c21e01c70" +dependencies = [ + "adler32", + "gzip-header", +] + +[[package]] +name = "digest" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f3d0c8c8752312f9713efd397ff63acb9f85585afbf179282e720e7704954dd5" +dependencies = [ + "generic-array", +] + +[[package]] +name = "dirs" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3fd78930633bd1c6e35c4b42b1df7b0cbc6bc191146e512bb3bedf243fcc3901" +dependencies = [ + "libc", + "redox_users", + "winapi", +] + +[[package]] +name = "entities" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b5320ae4c3782150d900b79807611a59a99fc9a1d61d686faafc24b93fc8d7ca" + +[[package]] +name = "fake-simd" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e88a8acf291dafb59c2d96e8f59828f3838bb1a70398823ade51a84de6a6deed" + +[[package]] +name = "filetime" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "975ccf83d8d9d0d84682850a38c8169027be83368805971cc4f238c2b245bc98" +dependencies = [ + "cfg-if", + "libc", + "redox_syscall 0.2.10", + "winapi", +] + +[[package]] +name = "flate2" +version = "1.0.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e6988e897c1c9c485f43b47a529cef42fde0547f9d8d41a7062518f1d8fc53f" +dependencies = [ + "cfg-if", + "crc32fast", + "libc", + "miniz_oxide", +] + +[[package]] +name = "fnv" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" + +[[package]] +name = "form_urlencoded" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5fc25a87fa4fd2094bffb06925852034d90a17f0d1e05197d4956d3555752191" +dependencies = [ + "matches", + "percent-encoding", +] + +[[package]] +name = "generic-array" +version = "0.12.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ffdf9f34f1447443d37393cc6c2b8313aebddcd96906caf34e54c68d8e57d7bd" +dependencies = [ + "typenum", +] + +[[package]] +name = "getrandom" +version = "0.1.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8fc3cb4d91f53b50155bdcfd23f6a4c39ae1969c2ae85982b135750cccaf5fce" +dependencies = [ + "cfg-if", + "libc", + "wasi 0.9.0+wasi-snapshot-preview1", +] + +[[package]] +name = "getrandom" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7fcd999463524c52659517fe2cea98493cfe485d10565e7b0fb07dbba7ad2753" +dependencies = [ + "cfg-if", + "libc", + "wasi 0.10.2+wasi-snapshot-preview1", +] + +[[package]] +name = "gzip-header" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "0131feb3d3bb2a5a238d8a4d09f6353b7ebfdc52e77bccbf4ea6eaa751dde639" +dependencies = [ + "crc32fast", +] + +[[package]] +name = "hashbrown" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ab5ef0d4909ef3724cc8cce6ccc8572c5c817592e9285f5464f8e86f8bd3726e" + +[[package]] +name = "hermit-abi" +version = "0.1.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33" +dependencies = [ + "libc", +] + +[[package]] +name = "httparse" +version = "1.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "acd94fdbe1d4ff688b67b04eee2e17bd50995534a61539e45adfefb45e5e5503" + +[[package]] +name = "idna" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "418a0a6fab821475f634efe3ccc45c013f742efe03d853e8d3355d5cb850ecf8" +dependencies = [ + "matches", + "unicode-bidi", + "unicode-normalization", +] + +[[package]] +name = "indexmap" +version = "1.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bc633605454125dec4b66843673f01c7df2b89479b32e0ed634e43a91cff62a5" +dependencies = [ + "autocfg", + "hashbrown", +] + +[[package]] +name = "itoa" +version = "0.4.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b71991ff56294aa922b450139ee08b3bfc70982c6b2c7562771375cf73542dd4" + +[[package]] +name = "lazy_static" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" + +[[package]] +name = "lazycell" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55" + +[[package]] +name = "libc" +version = "0.2.103" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dd8f7255a17a627354f321ef0055d63b898c6fb27eff628af4d1b66b7331edf6" + +[[package]] +name = "line-wrap" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f30344350a2a51da54c1d53be93fade8a237e545dbcc4bdbe635413f2117cab9" +dependencies = [ + "safemem", +] + +[[package]] +name = "linked-hash-map" +version = "0.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7fb9b38af92608140b86b693604b9ffcc5824240a484d1ecd4795bacb2fe88f3" + +[[package]] +name = "log" +version = "0.4.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "51b9bbe6c47d51fc3e1a9b945965946b4c44142ab8792c50835a980d362c2710" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "maplit" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3e2e65a1a2e43cfcb47a895c4c8b10d1f4a61097f9f254f183aee60cad9c651d" + +[[package]] +name = "matches" +version = "0.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a3e378b66a060d48947b590737b30a1be76706c8dd7b8ba0f2fe3989c68a853f" + +[[package]] +name = "memchr" +version = "2.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "308cc39be01b73d0d18f82a0e7b2a3df85245f84af96fdddc5d202d27e47b86a" + +[[package]] +name = "mime" +version = "0.3.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2a60c7ce501c71e03a9c9c0d35b861413ae925bd979cc7a4e30d060069aaac8d" + +[[package]] +name = "mime_guess" +version = 
"2.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2684d4c2e97d99848d30b324b00c8fcc7e5c897b7cbb5819b09e7c90e8baf212" +dependencies = [ + "mime", + "unicase", +] + +[[package]] +name = "miniz_oxide" +version = "0.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a92518e98c078586bc6c934028adcca4c92a53d6a958196de835170a01d84e4b" +dependencies = [ + "adler", + "autocfg", +] + +[[package]] +name = "multipart" +version = "0.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "00dec633863867f29cb39df64a397cdf4a6354708ddd7759f70c7fb51c5f9182" +dependencies = [ + "buf_redux", + "httparse", + "log", + "mime", + "mime_guess", + "quick-error", + "rand", + "safemem", + "tempfile", + "twoway 0.1.8", +] + +[[package]] +name = "num-integer" +version = "0.1.44" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d2cc698a63b549a70bc047073d2949cce27cd1c7b0a4a862d08a8031bc2801db" +dependencies = [ + "autocfg", + "num-traits", +] + +[[package]] +name = "num-traits" +version = "0.2.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a64b1ec5cda2586e284722486d802acf1f7dbdc623e2bfc57e65ca1cd099290" +dependencies = [ + "autocfg", +] + +[[package]] +name = "num_cpus" +version = "1.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05499f3756671c15885fee9034446956fff3f243d6077b91e5767df161f766b3" +dependencies = [ + "hermit-abi", + "libc", +] + +[[package]] +name = "onig" +version = "6.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b17403cf40f61e3ee059e3e90b7fc0a2953297168d4379b160f80d18fed848a4" +dependencies = [ + "bitflags", + "lazy_static", + "libc", + "onig_sys", +] + +[[package]] +name = "onig_sys" +version = "69.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5dd3eee045c84695b53b20255bb7317063df090b68e18bfac0abb6c39cf7f33e" +dependencies = [ + "cc", + "pkg-config", +] + +[[package]] +name = "opaque-debug" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2839e79665f131bdb5782e51f2c6c9599c133c6098982a54c794358bf432529c" + +[[package]] +name = "percent-encoding" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d4fd5641d01c8f18a23da7b6fe29298ff4b55afcccdf78973b24cf3175fee32e" + +[[package]] +name = "pest" +version = "2.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "10f4872ae94d7b90ae48754df22fd42ad52ce740b8f370b03da4835417403e53" +dependencies = [ + "ucd-trie", +] + +[[package]] +name = "pest_derive" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "833d1ae558dc601e9a60366421196a8d94bc0ac980476d0b67e1d0988d72b2d0" +dependencies = [ + "pest", + "pest_generator", +] + +[[package]] +name = "pest_generator" +version = "2.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "99b8db626e31e5b81787b9783425769681b347011cc59471e33ea46d2ea0cf55" +dependencies = [ + "pest", + "pest_meta", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "pest_meta" +version = "2.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "54be6e404f5317079812fc8f9f5279de376d8856929e21c184ecf6bbd692a11d" +dependencies = [ + "maplit", + "pest", + "sha-1", +] + +[[package]] +name = "pkg-config" +version = "0.3.20" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "7c9b1041b4387893b91ee6746cddfc28516aff326a3519fb2adf820932c5e6cb" + +[[package]] +name = "plist" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a38d026d73eeaf2ade76309d0c65db5a35ecf649e3cec428db316243ea9d6711" +dependencies = [ + "base64", + "chrono", + "indexmap", + "line-wrap", + "serde", + "xml-rs", +] + +[[package]] +name = "ppv-lite86" +version = "0.2.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac74c624d6b2d21f425f752262f42188365d7b8ff1aff74c82e45136510a4857" + +[[package]] +name = "proc-macro2" +version = "1.0.29" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9f5105d4fdaab20335ca9565e106a5d9b82b6219b5ba735731124ac6711d23d" +dependencies = [ + "unicode-xid", +] + +[[package]] +name = "quick-error" +version = "1.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0" + +[[package]] +name = "quote" +version = "1.0.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38bc8cc6a5f2e3655e0899c1b848643b2562f853f114bfec7be120678e3ace05" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "rand" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2e7573632e6454cf6b99d7aac4ccca54be06da05aca2ef7423d22d27d4d4bcd8" +dependencies = [ + "libc", + "rand_chacha", + "rand_core", + "rand_hc", +] + +[[package]] +name = "rand_chacha" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" +dependencies = [ + "ppv-lite86", + "rand_core", +] + +[[package]] +name = "rand_core" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d34f1408f55294453790c48b2f1ebbb1c5b4b7563eb1f418bcfcfdbb06ebb4e7" +dependencies = [ + "getrandom 0.2.3", +] + +[[package]] +name = "rand_hc" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d51e9f596de227fda2ea6c84607f5558e196eeaf43c986b724ba4fb8fdf497e7" +dependencies = [ + "rand_core", +] + +[[package]] +name = "redox_syscall" +version = "0.1.57" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41cc0f7e4d5d4544e8861606a285bb08d3e70712ccc7d2b84d7c0ccfaf4b05ce" + +[[package]] +name = "redox_syscall" +version = "0.2.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8383f39639269cde97d255a32bdb68c047337295414940c68bdd30c2e13203ff" +dependencies = [ + "bitflags", +] + +[[package]] +name = "redox_users" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "de0737333e7a9502c789a36d7c7fa6092a49895d4faa31ca5df163857ded2e9d" +dependencies = [ + "getrandom 0.1.16", + "redox_syscall 0.1.57", + "rust-argon2", +] + +[[package]] +name = "regex" +version = "1.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d07a8629359eb56f1e2fb1652bb04212c072a87ba68546a04065d525673ac461" +dependencies = [ + "aho-corasick", + "memchr", + "regex-syntax", +] + +[[package]] +name = "regex-syntax" +version = "0.6.25" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f497285884f3fcff424ffc933e56d7cbca511def0c9831a7f9b5f6153e3cc89b" + +[[package]] +name = "remove_dir_all" +version = 
"0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3acd125665422973a33ac9d3dd2df85edad0f4ae9b00dafb1a05e43a9f5ef8e7" +dependencies = [ + "winapi", +] + +[[package]] +name = "rouille" +version = "3.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dfc1bcf3b32bd9ef568402e750404c369ff172a6a34597c858f8ccf5f3bed013" +dependencies = [ + "base64", + "brotli2", + "chrono", + "deflate", + "filetime", + "multipart", + "num_cpus", + "percent-encoding", + "rand", + "serde", + "serde_derive", + "serde_json", + "sha1", + "threadpool", + "time 0.3.3", + "tiny_http", + "url", +] + +[[package]] +name = "rust-argon2" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4b18820d944b33caa75a71378964ac46f58517c92b6ae5f762636247c09e78fb" +dependencies = [ + "base64", + "blake2b_simd", + "constant_time_eq", + "crossbeam-utils", +] + +[[package]] +name = "ryu" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "71d301d4193d031abdd79ff7e3dd721168a9572ef3fe51a1517aba235bd8f86e" + +[[package]] +name = "safemem" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef703b7cb59335eae2eb93ceb664c0eb7ea6bf567079d843e09420219668e072" + +[[package]] +name = "same-file" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" +dependencies = [ + "winapi-util", +] + +[[package]] +name = "serde" +version = "1.0.130" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f12d06de37cf59146fbdecab66aa99f9fe4f78722e3607577a5375d66bd0c913" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_derive" +version = "1.0.130" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d7bc1a1ab1961464eae040d96713baa5a724a8152c1222492465b54322ec508b" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "serde_json" +version = "1.0.68" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0f690853975602e1bfe1ccbf50504d67174e3bcf340f23b5ea9992e0587a52d8" +dependencies = [ + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "sha-1" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f7d94d0bede923b3cea61f3f1ff57ff8cdfd77b400fb8f9998949e0cf04163df" +dependencies = [ + "block-buffer", + "digest", + "fake-simd", + "opaque-debug", +] + +[[package]] +name = "sha1" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2579985fda508104f7587689507983eadd6a6e84dd35d6d115361f530916fa0d" + +[[package]] +name = "shell-words" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6fa3938c99da4914afedd13bf3d79bcb6c277d1b2c398d23257a304d9e1b074" + +[[package]] +name = "strsim" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ea5119cdb4c55b55d432abb513a0429384878c15dde60cc77b1c99de1a95a6a" + +[[package]] +name = "syn" +version = "1.0.80" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d010a1623fbd906d51d650a9916aaefc05ffa0e4053ff7fe601167f3e715d194" +dependencies = [ + "proc-macro2", + "quote", + "unicode-xid", +] + +[[package]] +name = "syntect" +version = "4.6.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b20815bbe80ee0be06e6957450a841185fcf690fe0178f14d77a05ce2caa031" +dependencies = [ + "bincode", + "bitflags", + "flate2", + "fnv", + "lazy_static", + "lazycell", + "onig", + "plist", + "regex-syntax", + "serde", + "serde_derive", + "serde_json", + "walkdir", + "yaml-rust", +] + +[[package]] +name = "tempfile" +version = "3.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dac1c663cfc93810f88aed9b8941d48cabf856a1b111c29a40439018d870eb22" +dependencies = [ + "cfg-if", + "libc", + "rand", + "redox_syscall 0.2.10", + "remove_dir_all", + "winapi", +] + +[[package]] +name = "textwrap" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d326610f408c7a4eb6f51c37c330e496b08506c9457c9d34287ecc38809fb060" +dependencies = [ + "unicode-width", +] + +[[package]] +name = "threadpool" +version = "1.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d050e60b33d41c19108b32cea32164033a9013fe3b46cbd4457559bfbf77afaa" +dependencies = [ + "num_cpus", +] + +[[package]] +name = "time" +version = "0.1.43" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ca8a50ef2360fbd1eeb0ecd46795a87a19024eb4b53c5dc916ca1fd95fe62438" +dependencies = [ + "libc", + "winapi", +] + +[[package]] +name = "time" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cde1cf55178e0293453ba2cca0d5f8392a922e52aa958aee9c28ed02becc6d03" +dependencies = [ + "libc", +] + +[[package]] +name = "tiny_http" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ce51b50006056f590c9b7c3808c3bd70f0d1101666629713866c227d6e58d39" +dependencies = [ + "ascii", + "chrono", + "chunked_transfer", + "log", + "url", +] + +[[package]] +name = "tinyvec" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f83b2a3d4d9091d0abd7eba4dc2710b1718583bd4d8992e2190720ea38f391f7" +dependencies = [ + "tinyvec_macros", +] + +[[package]] +name = "tinyvec_macros" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cda74da7e1a664f795bb1f8a87ec406fb89a02522cf6e50620d016add6dbbf5c" + +[[package]] +name = "twoway" +version = "0.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "59b11b2b5241ba34be09c3cc85a36e56e48f9888862e19cedf23336d35316ed1" +dependencies = [ + "memchr", +] + +[[package]] +name = "twoway" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c57ffb460d7c24cd6eda43694110189030a3d1dfe418416d9468fd1c1d290b47" +dependencies = [ + "memchr", + "unchecked-index", +] + +[[package]] +name = "typed-arena" +version = "1.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a9b2228007eba4120145f785df0f6c92ea538f5a3635a612ecf4e334c8c1446d" + +[[package]] +name = "typenum" +version = "1.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b63708a265f51345575b27fe43f9500ad611579e764c79edbc2037b1121959ec" + +[[package]] +name = "ucd-trie" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56dee185309b50d1f11bfedef0fe6d036842e3fb77413abef29f8f8d1c5d4c1c" + +[[package]] +name = "unchecked-index" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"eeba86d422ce181a719445e51872fa30f1f7413b62becb52e95ec91aa262d85c" + +[[package]] +name = "unicase" +version = "2.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50f37be617794602aabbeee0be4f259dc1778fabe05e2d67ee8f79326d5cb4f6" +dependencies = [ + "version_check", +] + +[[package]] +name = "unicode-bidi" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a01404663e3db436ed2746d9fefef640d868edae3cceb81c3b8d5732fda678f" + +[[package]] +name = "unicode-normalization" +version = "0.1.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d54590932941a9e9266f0832deed84ebe1bf2e4c9e4a3554d393d18f5e854bf9" +dependencies = [ + "tinyvec", +] + +[[package]] +name = "unicode-width" +version = "0.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3ed742d4ea2bd1176e236172c8429aaf54486e7ac098db29ffe6529e0ce50973" + +[[package]] +name = "unicode-xid" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ccb82d61f80a663efe1f787a51b16b5a51e3314d6ac365b08639f52387b33f3" + +[[package]] +name = "unicode_categories" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "39ec24b3121d976906ece63c9daad25b85969647682eee313cb5779fdd69e14e" + +[[package]] +name = "url" +version = "2.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a507c383b2d33b5fc35d1861e77e6b383d158b2da5e14fe51b83dfedf6fd578c" +dependencies = [ + "form_urlencoded", + "idna", + "matches", + "percent-encoding", +] + +[[package]] +name = "vec_map" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1bddf1187be692e79c5ffeab891132dfb0f236ed36a43c7ed39f1165ee20191" + +[[package]] +name = "version_check" +version = "0.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5fecdca9a5291cc2b8dcf7dc02453fee791a280f3743cb0905f8822ae463b3fe" + +[[package]] +name = "walkdir" +version = "2.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "808cf2735cd4b6866113f648b791c6adc5714537bc222d9347bb203386ffda56" +dependencies = [ + "same-file", + "winapi", + "winapi-util", +] + +[[package]] +name = "wasi" +version = "0.9.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519" + +[[package]] +name = "wasi" +version = "0.10.2+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fd6fbd9a79829dd1ad0cc20627bf1ed606756a7f77edff7b66b7064f9cb327c6" + +[[package]] +name = "winapi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +dependencies = [ + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", +] + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" + +[[package]] +name = "winapi-util" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178" +dependencies = [ + "winapi", +] + +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source 
= "registry+https://github.com/rust-lang/crates.io-index" +checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" + +[[package]] +name = "xdg" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "de4cfc7dc9727713f386aadce9496f1ed64ea368d9f1f813a54d0f98f8741286" +dependencies = [ + "dirs", +] + +[[package]] +name = "xml-rs" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d2d7d3948613f75c98fd9328cfdcc45acc4d360655289d0a7d4ec931392200a3" + +[[package]] +name = "yaml-rust" +version = "0.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56c1936c4cc7a1c9ab21a1ebb602eb942ba868cbd44a99cb7cdc5892335e1c85" +dependencies = [ + "linked-hash-map", +] diff --git a/tools/cheddar/Cargo.toml b/tools/cheddar/Cargo.toml new file mode 100644 index 000000000000..6cc8163c730a --- /dev/null +++ b/tools/cheddar/Cargo.toml @@ -0,0 +1,18 @@ +[package] +name = "cheddar" +version = "0.2.0" +authors = ["Vincent Ambo <mail@tazj.in>"] +edition = "2018" + +[dependencies] +clap = "2.33" +comrak = "0.10" +lazy_static = "1.4" +rouille = "3.0" +syntect = "4.5.0" +serde_json = "1.0" +regex = "1.4" + +[dependencies.serde] +version = "1.0" +features = [ "derive" ] diff --git a/tools/cheddar/README.md b/tools/cheddar/README.md new file mode 100644 index 000000000000..706f3b62d552 --- /dev/null +++ b/tools/cheddar/README.md @@ -0,0 +1,21 @@ +cheddar +======= + +Cheddar is a tiny Rust tool that uses [syntect][] to render source code to +syntax-highlighted HTML. + +It's invocation is compatible with `cgit` filters, i.e. data is read from +`stdin` and the filename is taken from `argv`: + +```shell +cat README.md | cheddar README.md > README.html + +``` + +In fact, if you are looking at this file on git.tazj.in chances are that it was +rendered by cheddar. + +The name was chosen because I was eyeing a pack of cheddar-flavoured crisps +while thinking about name selection. + +[syntect]: https://github.com/trishume/syntect diff --git a/tools/cheddar/build.rs b/tools/cheddar/build.rs new file mode 100644 index 000000000000..b63b2e337851 --- /dev/null +++ b/tools/cheddar/build.rs @@ -0,0 +1,50 @@ +//! Build script that can be used outside of Nix builds to inject the +//! BAT_SYNTAXES variable when building in development mode. +//! +//! Note that this script assumes that cheddar is in a checkout of the +//! TVL depot. + +use std::process::Command; + +static BAT_SYNTAXES: &str = "BAT_SYNTAXES"; +static ERROR_MESSAGE: &str = r#"Failed to build syntax set. + +When building during development, cheddar expects to be in a checkout +of the TVL depot. This is required to automatically build the syntax +highlighting files that are needed at compile time. + +As cheddar can not automatically detect the location of the syntax +files, you must set the `BAT_SYNTAXES` environment variable to the +right path. + +The expected syntax files are at //third_party/bat_syntaxes in the +depot."#; + +fn main() { + // Do nothing if the variable is already set (e.g. via Nix) + if let Ok(_) = std::env::var(BAT_SYNTAXES) { + return; + } + + // Otherwise ask Nix to build it and inject the result. + let output = Command::new("nix-build") + .arg("-A").arg("third_party.bat_syntaxes") + // ... assuming cheddar is at //tools/cheddar ... 
+ .arg("../..") + .output() + .expect(ERROR_MESSAGE); + + if !output.status.success() { + eprintln!("{}\nNix output: {}", ERROR_MESSAGE, String::from_utf8_lossy(&output.stderr)); + return; + } + + let out_path = String::from_utf8(output.stdout) + .expect("Nix returned invalid output after building syntax set"); + + // Return an instruction to Cargo that will set the environment + // variable during rustc calls. + // + // https://doc.rust-lang.org/cargo/reference/build-scripts.html#cargorustc-envvarvalue + println!("cargo:rustc-env={}={}", BAT_SYNTAXES, out_path.trim()); +} diff --git a/tools/cheddar/default.nix b/tools/cheddar/default.nix new file mode 100644 index 000000000000..c8d7ba5ffef2 --- /dev/null +++ b/tools/cheddar/default.nix @@ -0,0 +1,12 @@ +{ depot, pkgs, ... }: + +depot.third_party.naersk.buildPackage { + src = ./.; + doDoc = false; + + override = x: { + # Use our custom bat syntax set, which is everything from upstream, + # plus additional languages we care about. + BAT_SYNTAXES = "${depot.third_party.bat_syntaxes}"; + }; +} diff --git a/tools/cheddar/src/bin/cheddar.rs b/tools/cheddar/src/bin/cheddar.rs new file mode 100644 index 000000000000..58ef32a1b432 --- /dev/null +++ b/tools/cheddar/src/bin/cheddar.rs @@ -0,0 +1,135 @@ +//! This file defines the binary for cheddar, which can be interacted +//! with in two different ways: +//! +//! 1. As a CLI tool that acts as a cgit filter. +//! 2. As a long-running HTTP server that handles rendering requests +//! (matching the SourceGraph protocol). +use clap::{App, Arg}; +use rouille::Response; +use rouille::{router, try_or_400}; +use serde::Deserialize; +use serde_json::json; +use std::collections::HashMap; +use std::io; + +use cheddar::{THEMES, format_code, format_markdown}; + +// Server endpoint for rendering the syntax of source code. This +// replaces the 'syntect_server' component of Sourcegraph. +fn code_endpoint(request: &rouille::Request) -> rouille::Response { + #[derive(Deserialize)] + struct SourcegraphQuery { + filepath: String, + theme: String, + code: String, + } + + let query: SourcegraphQuery = try_or_400!(rouille::input::json_input(request)); + let mut buf: Vec<u8> = Vec::new(); + + // We don't use syntect with the sourcegraph themes bundled + // currently, so let's fall back to something that is kind of + // similar (tm). + let theme = &THEMES.themes[match query.theme.as_str() { + "Sourcegraph (light)" => "Solarized (light)", + _ => "Solarized (dark)", + }]; + + format_code(theme, &mut query.code.as_bytes(), &mut buf, &query.filepath); + + Response::json(&json!({ + "is_plaintext": false, + "data": String::from_utf8_lossy(&buf) + })) +} + +// Server endpoint for rendering a Markdown file. 
diff --git a/tools/cheddar/default.nix b/tools/cheddar/default.nix
new file mode 100644
index 000000000000..c8d7ba5ffef2
--- /dev/null
+++ b/tools/cheddar/default.nix
@@ -0,0 +1,12 @@
+{ depot, pkgs, ... }:
+
+depot.third_party.naersk.buildPackage {
+  src = ./.;
+  doDoc = false;
+
+  override = x: {
+    # Use our custom bat syntax set, which is everything from upstream,
+    # plus additional languages we care about.
+    BAT_SYNTAXES = "${depot.third_party.bat_syntaxes}";
+  };
+}
diff --git a/tools/cheddar/src/bin/cheddar.rs b/tools/cheddar/src/bin/cheddar.rs
new file mode 100644
index 000000000000..58ef32a1b432
--- /dev/null
+++ b/tools/cheddar/src/bin/cheddar.rs
@@ -0,0 +1,135 @@
+//! This file defines the binary for cheddar, which can be interacted
+//! with in two different ways:
+//!
+//! 1. As a CLI tool that acts as a cgit filter.
+//! 2. As a long-running HTTP server that handles rendering requests
+//!    (matching the SourceGraph protocol).
+use clap::{App, Arg};
+use rouille::Response;
+use rouille::{router, try_or_400};
+use serde::Deserialize;
+use serde_json::json;
+use std::collections::HashMap;
+use std::io;
+
+use cheddar::{THEMES, format_code, format_markdown};
+
+// Server endpoint for rendering the syntax of source code. This
+// replaces the 'syntect_server' component of Sourcegraph.
+fn code_endpoint(request: &rouille::Request) -> rouille::Response {
+    #[derive(Deserialize)]
+    struct SourcegraphQuery {
+        filepath: String,
+        theme: String,
+        code: String,
+    }
+
+    let query: SourcegraphQuery = try_or_400!(rouille::input::json_input(request));
+    let mut buf: Vec<u8> = Vec::new();
+
+    // We don't use syntect with the sourcegraph themes bundled
+    // currently, so let's fall back to something that is kind of
+    // similar (tm).
+    let theme = &THEMES.themes[match query.theme.as_str() {
+        "Sourcegraph (light)" => "Solarized (light)",
+        _ => "Solarized (dark)",
+    }];
+
+    format_code(theme, &mut query.code.as_bytes(), &mut buf, &query.filepath);
+
+    Response::json(&json!({
+        "is_plaintext": false,
+        "data": String::from_utf8_lossy(&buf)
+    }))
+}
+
+// Server endpoint for rendering a Markdown file.
+fn markdown_endpoint(request: &rouille::Request) -> rouille::Response {
+    let mut texts: HashMap<String, String> = try_or_400!(rouille::input::json_input(request));
+
+    for text in texts.values_mut() {
+        let mut buf: Vec<u8> = Vec::new();
+        format_markdown(&mut text.as_bytes(), &mut buf);
+        *text = String::from_utf8_lossy(&buf).to_string();
+    }
+
+    Response::json(&texts)
+}
+
+fn highlighting_server(listen: &str) {
+    println!("Starting syntax highlighting server on '{}'", listen);
+
+    rouille::start_server(listen, move |request| {
+        router!(request,
+            // Markdown rendering route
+            (POST) (/markdown) => {
+                markdown_endpoint(request)
+            },
+
+            // Code rendering route
+            (POST) (/) => {
+                code_endpoint(request)
+            },
+
+            _ => {
+                rouille::Response::empty_404()
+            },
+        )
+    });
+}
+
+fn main() {
+    // Parse the command-line flags passed to cheddar to determine
+    // whether it is running in about-filter mode (`--about-filter`)
+    // and what file extension has been supplied.
+    let matches = App::new("cheddar")
+        .about("TVL's syntax highlighter")
+        .arg(
+            Arg::with_name("about-filter")
+                .help("Run as a cgit about-filter (renders Markdown)")
+                .long("about-filter")
+                .takes_value(false),
+        )
+        .arg(
+            Arg::with_name("sourcegraph-server")
+                .help("Run as a Sourcegraph compatible web-server")
+                .long("sourcegraph-server")
+                .takes_value(false),
+        )
+        .arg(
+            Arg::with_name("listen")
+                .help("Address to listen on")
+                .long("listen")
+                .takes_value(true),
+        )
+        .arg(Arg::with_name("filename").help("File to render").index(1))
+        .get_matches();
+
+    if matches.is_present("sourcegraph-server") {
+        highlighting_server(
+            matches
+                .value_of("listen")
+                .expect("Listening address is required for server mode"),
+        );
+        return;
+    }
+
+    let filename = matches.value_of("filename").expect("filename is required");
+
+    let stdin = io::stdin();
+    let mut in_handle = stdin.lock();
+
+    let stdout = io::stdout();
+    let mut out_handle = stdout.lock();
+
+    if matches.is_present("about-filter") && filename.ends_with(".md") {
+        format_markdown(&mut in_handle, &mut out_handle);
+    } else {
+        format_code(
+            &THEMES.themes["InspiredGitHub"],
+            &mut in_handle,
+            &mut out_handle,
+            filename,
+        );
+    }
+}
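The two POST routes above define the server protocol. A hedged sketch of the JSON payloads they accept, built with `serde_json` (the field names come from `SourcegraphQuery` and `markdown_endpoint` above; the concrete values are made up for illustration):

```rust
use serde_json::json;

fn example_requests() {
    // POST / -- code highlighting; mirrors the SourcegraphQuery struct.
    // The response has the shape {"is_plaintext": false, "data": "<pre>..."}.
    let code_request = json!({
        "filepath": "main.rs",
        "theme": "Sourcegraph (light)",
        "code": "fn main() {}",
    });

    // POST /markdown -- a map of named Markdown texts; each value is
    // replaced with its rendered HTML in the response.
    let markdown_request = json!({
        "README.md": "Please look at b/123.",
    });

    let _ = (code_request, markdown_request);
}
```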
diff --git a/tools/cheddar/src/lib.rs b/tools/cheddar/src/lib.rs
new file mode 100644
index 000000000000..e5d7aad1e62c
--- /dev/null
+++ b/tools/cheddar/src/lib.rs
@@ -0,0 +1,337 @@
+//! This file implements the rendering logic of cheddar with public
+//! functions for syntax-highlighting code and for turning Markdown
+//! into HTML with TVL extensions.
+use comrak::arena_tree::Node;
+use comrak::nodes::{Ast, AstNode, NodeCodeBlock, NodeHtmlBlock, NodeValue};
+use comrak::{format_html, parse_document, Arena, ComrakOptions};
+use lazy_static::lazy_static;
+use regex::Regex;
+use std::cell::RefCell;
+use std::collections::HashMap;
+use std::env;
+use std::ffi::OsStr;
+use std::io;
+use std::io::BufRead;
+use std::io::Write;
+use std::path::Path;
+use syntect::dumps::from_binary;
+use syntect::easy::HighlightLines;
+use syntect::highlighting::{Theme, ThemeSet};
+use syntect::parsing::{SyntaxReference, SyntaxSet};
+use syntect::util::LinesWithEndings;
+
+use syntect::html::{
+    append_highlighted_html_for_styled_line, start_highlighted_html_snippet, IncludeBackground,
+};
+
+#[cfg(test)]
+mod tests;
+
+lazy_static! {
+    // Load syntaxes lazily. Initialisation might not be required in
+    // the case of Markdown rendering (if there's no code blocks
+    // within the document).
+    //
+    // Note that the syntax set is included from the path pointed to
+    // by the BAT_SYNTAXES environment variable at compile time. This
+    // variable is populated by Nix and points to TVL's syntax set.
+    static ref SYNTAXES: SyntaxSet = from_binary(include_bytes!(env!("BAT_SYNTAXES")));
+    pub static ref THEMES: ThemeSet = ThemeSet::load_defaults();
+
+    // Configure Comrak's Markdown rendering with all the bells &
+    // whistles!
+    static ref MD_OPTS: ComrakOptions = {
+        let mut options = ComrakOptions::default();
+
+        // Enable non-standard Markdown features:
+        options.extension.strikethrough = true;
+        options.extension.tagfilter = true;
+        options.extension.table = true;
+        options.extension.autolink = true;
+        options.extension.tasklist = true;
+        options.extension.header_ids = Some(String::new()); // yyeeesss!
+        options.extension.footnotes = true;
+        options.extension.description_lists = true;
+        options.extension.front_matter_delimiter = Some("---".to_owned());
+
+        // Required for tagfilter
+        options.render.unsafe_ = true;
+
+        options
+    };
+
+    // Configures a map of specific filenames to languages, for cases
+    // where the detection by extension or other heuristics fails.
+    static ref FILENAME_OVERRIDES: HashMap<&'static str, &'static str> = {
+        let mut map = HashMap::new();
+        // rules.pl is the canonical name of the submit rule file in
+        // Gerrit, which is written in Prolog.
+        map.insert("rules.pl", "Prolog");
+        map
+    };
+
+    // Default shortlink set used in cheddar (i.e. TVL's shortlinks)
+    static ref TVL_LINKS: Vec<Shortlink> = vec![
+        // TVL shortlinks for bugs and changelists (e.g. b/123,
+        // cl/123). Coincidentally these have the same format, which
+        // makes the initial implementation easy.
+        Shortlink {
+            pattern: Regex::new(r#"\b(?P<type>b|cl)/(?P<dest>\d+)\b"#).unwrap(),
+            replacement: "[$type/$dest](https://$type.tvl.fyi/$dest)",
+        }
+    ];
+}
+
+/// Structure that describes a single shortlink that should be
+/// automatically highlighted. Highlighting is performed as a string
+/// replacement over input Markdown.
+pub struct Shortlink {
+    /// Short link pattern to recognise. Make sure to anchor these
+    /// correctly.
+    pub pattern: Regex,
+
+    /// Replacement string, as per the documentation of
+    /// [`Regex::replace`].
+    pub replacement: &'static str,
+}
+
+// HTML fragment used when rendering inline blocks in Markdown documents.
+// Emulates the GitHub style (subtle background hue and padding).
+const BLOCK_PRE: &str = "<pre style=\"background-color:#f6f8fa;padding:16px;\">\n";
+
+fn should_continue(res: &io::Result<usize>) -> bool {
+    match *res {
+        Ok(n) => n > 0,
+        Err(_) => false,
+    }
+}
+
+// This function is taken from the Comrak documentation.
+fn iter_nodes<'a, F>(node: &'a AstNode<'a>, f: &F)
+where
+    F: Fn(&'a AstNode<'a>),
+{
+    f(node);
+    for c in node.children() {
+        iter_nodes(c, f);
+    }
+}
+
+// Many of the syntaxes in the syntax list have random capitalisations, which
+// means that name matching for the block info of a code block in HTML fails.
+//
+// Instead, try finding a syntax match by comparing case insensitively (for
+// ASCII characters, anyways).
+fn find_syntax_case_insensitive(info: &str) -> Option<&'static SyntaxReference> {
+    // TODO(tazjin): memoize this lookup
+    SYNTAXES
+        .syntaxes()
+        .iter()
+        .rev()
+        .find(|&s| info.eq_ignore_ascii_case(&s.name))
+}
+
+// Replaces code blocks inside of a Markdown AST with HTML blocks rendered by
+// syntect. This enables static (i.e. no JavaScript) syntax highlighting, even
+// of complex languages.
+fn highlight_code_block(code_block: &NodeCodeBlock) -> NodeValue {
+    let theme = &THEMES.themes["InspiredGitHub"];
+    let info = String::from_utf8_lossy(&code_block.info);
+
+    let syntax = find_syntax_case_insensitive(&info)
+        .or_else(|| SYNTAXES.find_syntax_by_extension(&info))
+        .unwrap_or_else(|| SYNTAXES.find_syntax_plain_text());
+
+    let code = String::from_utf8_lossy(&code_block.literal);
+
+    let rendered = {
+        // Write the block preamble manually to get exactly the
+        // desired layout:
+        let mut hl = HighlightLines::new(syntax, theme);
+        let mut buf = BLOCK_PRE.to_string();
+
+        for line in LinesWithEndings::from(&code) {
+            let regions = hl.highlight(line, &SYNTAXES);
+            append_highlighted_html_for_styled_line(&regions[..], IncludeBackground::No, &mut buf);
+        }
+
+        buf.push_str("</pre>");
+        buf
+    };
+
+    let mut block = NodeHtmlBlock::default();
+    block.literal = rendered.into_bytes();
+
+    NodeValue::HtmlBlock(block)
+}
+
+// Supported callout elements (which each have their own distinct rendering):
+enum Callout {
+    Todo,
+    Warning,
+    Question,
+    Tip,
+}
+
+// Determine whether the first child of the supplied node contains a text that
+// should cause a callout section to be rendered.
+fn has_callout<'a>(node: &Node<'a, RefCell<Ast>>) -> Option<Callout> {
+    match node.first_child().map(|c| c.data.borrow()) {
+        Some(child) => match &child.value {
+            NodeValue::Text(text) => {
+                if text.starts_with(b"TODO") {
+                    return Some(Callout::Todo);
+                } else if text.starts_with(b"WARNING") {
+                    return Some(Callout::Warning);
+                } else if text.starts_with(b"QUESTION") {
+                    return Some(Callout::Question);
+                } else if text.starts_with(b"TIP") {
+                    return Some(Callout::Tip);
+                }
+
+                None
+            }
+            _ => None,
+        },
+        _ => None,
+    }
+}
+
+// Replace instances of known shortlinks in the input document with
+// Markdown syntax for a highlighted link.
+fn linkify_shortlinks(mut text: String, shortlinks: &[Shortlink]) -> String {
+    for link in shortlinks {
+        text = link
+            .pattern
+            .replace_all(&text, link.replacement)
+            .to_string();
+    }
+
+    return text;
+}
+
+fn format_callout_paragraph(callout: Callout) -> NodeValue {
+    let class = match callout {
+        Callout::Todo => "cheddar-todo",
+        Callout::Warning => "cheddar-warning",
+        Callout::Question => "cheddar-question",
+        Callout::Tip => "cheddar-tip",
+    };
+
+    let mut block = NodeHtmlBlock::default();
+    block.literal = format!("<p class=\"cheddar-callout {}\">", class).into_bytes();
+    NodeValue::HtmlBlock(block)
+}
+
+pub fn format_markdown_with_shortlinks<R: BufRead, W: Write>(
+    reader: &mut R,
+    writer: &mut W,
+    shortlinks: &[Shortlink],
+) {
+    let document = {
+        let mut buffer = String::new();
+        reader
+            .read_to_string(&mut buffer)
+            .expect("reading should work");
+        buffer
+    };
+
+    let arena = Arena::new();
+    let root = parse_document(&arena, &linkify_shortlinks(document, shortlinks), &MD_OPTS);
+
+    // This node must exist with a lifetime greater than that of the parsed AST
+    // in case that callouts are encountered (otherwise insertion into the tree
+    // is not possible).
+    let mut p_close_value = NodeHtmlBlock::default();
+    p_close_value.literal = b"</p>".to_vec();
+
+    let p_close_node = Ast::new(NodeValue::HtmlBlock(p_close_value));
+    let p_close = Node::new(RefCell::new(p_close_node));
+
+    // Special features of Cheddar are implemented by traversing the
+    // arena and reacting on nodes that we might want to modify.
+    iter_nodes(root, &|node| {
+        let mut ast = node.data.borrow_mut();
+        let new = match &ast.value {
+            // Syntax highlighting is implemented by replacing the
+            // code block node with literal HTML.
+            NodeValue::CodeBlock(code) => Some(highlight_code_block(code)),
+
+            NodeValue::Paragraph => {
+                if let Some(callout) = has_callout(node) {
+                    node.insert_after(&p_close);
+                    Some(format_callout_paragraph(callout))
+                } else {
+                    None
+                }
+            }
+            _ => None,
+        };
+
+        if let Some(new_value) = new {
+            ast.value = new_value
+        }
+    });
+
+    format_html(root, &MD_OPTS, writer).expect("Markdown rendering failed");
+}
+
+pub fn format_markdown<R: BufRead, W: Write>(reader: &mut R, writer: &mut W) {
+    format_markdown_with_shortlinks(reader, writer, &TVL_LINKS)
+}
+
+fn find_syntax_for_file(filename: &str) -> &'static SyntaxReference {
+    (*FILENAME_OVERRIDES)
+        .get(filename)
+        .and_then(|name| SYNTAXES.find_syntax_by_name(name))
+        .or_else(|| {
+            Path::new(filename)
+                .extension()
+                .and_then(OsStr::to_str)
+                .and_then(|s| SYNTAXES.find_syntax_by_extension(s))
+        })
+        .unwrap_or_else(|| SYNTAXES.find_syntax_plain_text())
+}
+
+pub fn format_code<R: BufRead, W: Write>(
+    theme: &Theme,
+    reader: &mut R,
+    writer: &mut W,
+    filename: &str,
+) {
+    let mut linebuf = String::new();
+
+    // Get the first line, we might need it for syntax identification.
+    let mut read_result = reader.read_line(&mut linebuf);
+    let syntax = find_syntax_for_file(filename);
+
+    let mut hl = HighlightLines::new(syntax, theme);
+    let (mut outbuf, bg) = start_highlighted_html_snippet(theme);
+
+    // Rather than using the `lines` iterator, read each line manually
+    // and maintain buffer state.
+    //
+    // This is done because the syntax highlighter requires trailing
+    // newlines to be efficient, and those are stripped in the lines
+    // iterator.
+    while should_continue(&read_result) {
+        let regions = hl.highlight(&linebuf, &SYNTAXES);
+
+        append_highlighted_html_for_styled_line(
+            &regions[..],
+            IncludeBackground::IfDifferent(bg),
+            &mut outbuf,
+        );
+
+        // immediately output the current state to avoid keeping
+        // things in memory
+        write!(writer, "{}", outbuf).expect("write should not fail");
+
+        // merry go round again
+        linebuf.clear();
+        outbuf.clear();
+        read_result = reader.read_line(&mut linebuf);
+    }
+
+    writeln!(writer, "</pre>").expect("write should not fail");
+}
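To make the public API concrete, here is a small usage sketch of `format_code` (the tests that follow exercise `format_markdown`); the input string and file name are illustrative only:

```rust
use std::io::BufReader;

fn render_rust_snippet() -> String {
    let source = "fn main() {}\n";
    let mut reader = BufReader::new(source.as_bytes());
    let mut html: Vec<u8> = Vec::new();

    // The file name is used only for syntax detection (extension
    // lookup or FILENAME_OVERRIDES); the content comes from the reader.
    cheddar::format_code(
        &cheddar::THEMES.themes["InspiredGitHub"],
        &mut reader,
        &mut html,
        "example.rs",
    );

    String::from_utf8_lossy(&html).into_owned()
}
```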
diff --git a/tools/cheddar/src/tests.rs b/tools/cheddar/src/tests.rs
new file mode 100644
index 000000000000..5b7b1cc52a95
--- /dev/null
+++ b/tools/cheddar/src/tests.rs
@@ -0,0 +1,97 @@
+use super::*;
+use std::io::BufReader;
+
+// Markdown rendering expectation, ignoring leading and trailing
+// whitespace in the input and output.
+fn expect_markdown(input: &str, expected: &str) {
+    let mut input_buf = BufReader::new(input.trim().as_bytes());
+    let mut out_buf: Vec<u8> = vec![];
+    format_markdown(&mut input_buf, &mut out_buf);
+
+    let out_string = String::from_utf8(out_buf).expect("output should be UTF8");
+    assert_eq!(out_string.trim(), expected.trim());
+}
+
+#[test]
+fn renders_simple_markdown() {
+    expect_markdown("hello", "<p>hello</p>\n");
+}
+
+#[test]
+fn renders_callouts() {
+    expect_markdown(
+        "TODO some task.",
+        r#"<p class="cheddar-callout cheddar-todo">
+TODO some task.
+</p>
+"#,
+    );
+
+    expect_markdown(
+        "WARNING: be careful",
+        r#"<p class="cheddar-callout cheddar-warning">
+WARNING: be careful
+</p>
+"#,
+    );
+
+    expect_markdown(
+        "TIP: note the thing",
+        r#"<p class="cheddar-callout cheddar-tip">
+TIP: note the thing
+</p>
+"#,
+    );
+}
+
+#[test]
+fn renders_code_snippets() {
+    expect_markdown(
+        r#"
+Code:
+```nix
+toString 42
+```
+"#,
+        r#"
+<p>Code:</p>
+<pre style="background-color:#f6f8fa;padding:16px;">
+<span style="color:#62a35c;">toString </span><span style="color:#0086b3;">42
+</span></pre>
+"#,
+    );
+}
+
+#[test]
+fn highlights_bug_link() {
+    expect_markdown(
+        "Please look at b/123.",
+        "<p>Please look at <a href=\"https://b.tvl.fyi/123\">b/123</a>.</p>",
+    );
+}
+
+#[test]
+fn highlights_cl_link() {
+    expect_markdown(
+        "Please look at cl/420.",
+        "<p>Please look at <a href=\"https://cl.tvl.fyi/420\">cl/420</a>.</p>",
+    );
+}
+
+#[test]
+fn highlights_multiple_shortlinks() {
+    expect_markdown(
+        "Please look at cl/420, b/123.",
+        "<p>Please look at <a href=\"https://cl.tvl.fyi/420\">cl/420</a>, <a href=\"https://b.tvl.fyi/123\">b/123</a>.</p>",
+    );
+
+    expect_markdown(
+        "b/213/cl/213 are different things",
+        "<p><a href=\"https://b.tvl.fyi/213\">b/213</a>/<a href=\"https://cl.tvl.fyi/213\">cl/213</a> are different things</p>",
+    );
+}
+
+#[test]
+fn ignores_invalid_shortlinks() {
+    expect_markdown("b/abc is not a real bug", "<p>b/abc is not a real bug</p>");
+}
diff --git a/tools/depot-build.nix b/tools/depot-build.nix
new file mode 100644
index 000000000000..62b4c7fc4476
--- /dev/null
+++ b/tools/depot-build.nix
@@ -0,0 +1,8 @@
+# Utility script for building any arbitrary depot path in its folder.
+{ pkgs, ... }:
+
+pkgs.writeShellScriptBin "depot-build" ''
+  TARGET=$(git rev-parse --show-prefix | sed 's|/$||')
+  echo "Building //$TARGET"
+  nix-build -A $(echo $TARGET | sed 's|/|.|g') $(${pkgs.git}/bin/git rev-parse --show-toplevel)
+''
diff --git a/tools/depot-nixpkgs-update.nix b/tools/depot-nixpkgs-update.nix
new file mode 100644
index 000000000000..2475ca2e2330
--- /dev/null
+++ b/tools/depot-nixpkgs-update.nix
@@ -0,0 +1,44 @@
+{ pkgs, depot, ... }:
+
+let
+  inherit (depot.nix)
+    getBins
+    ;
+
+  stableRelease = "21.05";
+
+  channelsUrl = "https://channels.nixos.org";
+  archiveUrl = "https://github.com/NixOS/nixpkgs/archive/";
+
+  bins = getBins pkgs.nix [ "nix-prefetch-url" ]
+    // getBins pkgs.curl [ "curl" ]
+    ;
+
+in
+
+pkgs.writers.writeDashBin "depot-nixpkgs-update" ''
+  set -e
+
+  printSet() {
+    setname="$1"
+    shift
+    channel="$1"
+    shift
+
+    commit="$(${bins.curl} -L "${channelsUrl}/$channel/git-revision")"
+    date="$(${bins.curl} -i -L "${channelsUrl}/$channel/git-revision" \
+      | grep ^last-modified \
+      | sed 's/^last-modified: \(.\+\)\r/\1/')"
+    hash="$(${bins.nix-prefetch-url} --unpack --type sha256 "${archiveUrl}/$commit.tar.gz")"
+
+    printf '%s\n' "
+    # Tracking $channel as of $(date --rfc-3339=date --date="$date").
+    $setname = {
+      commit = \"$commit\";
+      sha256 = \"$hash\";
+    };"
+  }
+
+  printSet unstableHashes nixos-unstable
+  printSet stableHashes nixos-${stableRelease}
+''
diff --git a/tools/depot-scanner/OWNERS b/tools/depot-scanner/OWNERS
new file mode 100644
index 000000000000..cefacea4d049
--- /dev/null
+++ b/tools/depot-scanner/OWNERS
@@ -0,0 +1,3 @@
+inherit: true
+owners:
+  - riking
diff --git a/tools/depot-scanner/default.nix b/tools/depot-scanner/default.nix
new file mode 100644
index 000000000000..e6fd5dec292c
--- /dev/null
+++ b/tools/depot-scanner/default.nix
@@ -0,0 +1,16 @@
+{ depot, pkgs, ... }:
+
+let
+  localProto = depot.nix.buildGo.grpc {
+    name = "code.tvl.fyi/tools/depot-scanner/proto";
+    proto = ./depot_scanner.proto;
+  };
+in depot.nix.buildGo.program {
+  name = "depot-scanner";
+  srcs = [
+    ./main.go
+  ];
+  deps = [
+    localProto
+  ];
+} // { inherit localProto; meta.ci = false; }
diff --git a/tools/depot-scanner/depot_scanner.proto b/tools/depot-scanner/depot_scanner.proto
new file mode 100644
index 000000000000..5249daebf495
--- /dev/null
+++ b/tools/depot-scanner/depot_scanner.proto
@@ -0,0 +1,46 @@
+// Copyright 2020 TVL
+// SPDX-License-Identifier: MIT
+
+syntax = "proto3";
+package tvl.tools.depot_scanner;
+option go_package = "code.tvl.fyi/tools/depot-scanner/proto";
+
+enum PathType {
+  UNKNOWN = 0;
+  DEPOT = 1;
+  STORE = 2;
+  CORE = 3;
+}
+
+message ScanRequest {
+  // Which revision of the depot
+  string revision = 1;
+  string attr = 2;
+  // Optionally, the attr to evaluate can be provided as a path to a folder or a
+  // .nix file. This is used by the HTTP service.
+  string attrAsPath = 3;
+}
+
+message ScanResponse {
+  repeated string depotPath = 1;
+  repeated string nixStorePath = 2;
+  repeated string corePkgsPath = 4;
+  repeated string otherPath = 3;
+
+  bytes derivation = 5;
+}
+
+message ArchiveRequest {
+  repeated string depotPath = 1;
+}
+
+message ArchiveChunk {
+  bytes chunk = 1;
+}
+
+service DepotScanService {
+  rpc Scan(ScanRequest) returns (ScanResponse);
+
+  rpc MakeArchive(ArchiveRequest) returns (stream ArchiveChunk);
+}
diff --git a/tools/depot-scanner/go.mod b/tools/depot-scanner/go.mod
new file mode 100644
index 000000000000..bdd22fc1ef01
--- /dev/null
+++ b/tools/depot-scanner/go.mod
@@ -0,0 +1,3 @@
+module code.tvl.fyi/tools/depot-scanner
+
+go 1.14
diff --git a/tools/depot-scanner/main.go b/tools/depot-scanner/main.go
new file mode 100644
index 000000000000..273190258958
--- /dev/null
+++ b/tools/depot-scanner/main.go
@@ -0,0 +1,222 @@
+package main
+
+import (
+	"bufio"
+	"flag"
+	"fmt"
+	"io"
+	"os"
+	"os/exec"
+	"strings"
+
+	pb "code.tvl.fyi/tools/depot-scanner/proto"
+)
+
+var nixInstantiatePath = flag.String("nix-bin", "/run/current-system/sw/bin/nix-instantiate", "path to nix-instantiate")
+var depotRoot = flag.String("depot", envOr("DEPOT_ROOT", "/depot/"), "path to tvl.fyi depot at current canon")
+var nixStoreRoot = flag.String("store-path", "/nix/store/", "prefix for all valid nix store paths")
+
+var modeFlag = flag.String("mode", modeArchive, "operation mode. valid values: tar, print")
+var onlyFlag = flag.String("only", "", "only enable the listed output types, comma separated. valid values: DEPOT, STORE, CORE, UNKNOWN")
+var relativeFlag = flag.Bool("relpath", false, "when printing paths, print them relative to the root of their path type")
+
+const (
+	modeArchive = "tar"
+	modePrint   = "print"
+)
+
+const (
+	// String that identifies a path as belonging to nix corepkgs.
+	corePkgsString = "/share/nix/corepkgs/"
+
+	depotTraceString = "trace: depot-scan: "
+)
+
+type fileScanType int
+
+const (
+	unknownPath fileScanType = iota
+	depotPath
+	nixStorePath
+	corePkgsPath
+)
+
+func launchNix(attr string) (*exec.Cmd, io.ReadCloser, io.ReadCloser, error) {
+	cmd := exec.Command(*nixInstantiatePath, "--trace-file-access", "-A", attr)
+	stdout, err := cmd.StdoutPipe()
+	if err != nil {
+		return nil, nil, nil, err
+	}
+	stderr, err := cmd.StderrPipe()
+	if err != nil {
+		stdout.Close()
+		return nil, nil, nil, err
+	}
+
+	err = cmd.Start()
+	if err != nil {
+		stdout.Close()
+		stderr.Close()
+		return nil, nil, nil, err
+	}
+
+	return cmd, stdout, stderr, nil
+}
+
+func categorizePath(path string) fileScanType {
+	if strings.HasPrefix(path, *nixStoreRoot) {
+		if strings.Contains(path, corePkgsString) {
+			return corePkgsPath
+		}
+		return nixStorePath
+	} else if strings.HasPrefix(path, *depotRoot) {
+		return depotPath
+	} else if strings.Contains(path, corePkgsString) {
+		return corePkgsPath
+	}
+	return unknownPath
+}
+
+func addPath(path string, out map[fileScanType]map[string]struct{}) {
+	cat := categorizePath(path)
+	if out[cat] == nil {
+		out[cat] = make(map[string]struct{})
+	}
+
+	out[cat][path] = struct{}{}
+}
+
+func consumeOutput(stdout, stderr io.ReadCloser) (map[fileScanType]map[string]struct{}, string, error) {
+	result := make(map[fileScanType]map[string]struct{})
+
+	scanner := bufio.NewScanner(stderr)
+	for scanner.Scan() {
+		line := scanner.Text()
+		if strings.HasPrefix(line, depotTraceString) {
+			addPath(strings.TrimPrefix(line, depotTraceString), result)
+		}
+	}
+	if scanner.Err() != nil {
+		return nil, "", scanner.Err()
+	}
+
+	// Get derivation path
+	derivPath := ""
+	scanner = bufio.NewScanner(stdout)
+	for scanner.Scan() {
+		line := scanner.Text()
+		if strings.HasPrefix(line, *nixStoreRoot) {
+			derivPath = line
+			// consume the rest of the output
+		}
+	}
+	if scanner.Err() != nil {
+		return nil, "", scanner.Err()
+	}
+
+	return result, derivPath, nil
+}
+
+func main() {
+	flag.Parse()
+
+	checkDepotRoot()
+
+	enabledPathTypes := make(map[pb.PathType]bool, 4)
+	if len(*onlyFlag) > 0 {
+		enabledOutputs := strings.Split(*onlyFlag, ",")
+		for _, v := range enabledOutputs {
+			i, ok := pb.PathType_value[strings.ToUpper(v)]
+			if !ok {
+				fmt.Fprintln(os.Stderr, "warning: unrecognized PathType name:", v)
+				continue
+			}
+			enabledPathTypes[pb.PathType(i)] = true
+		}
+	} else {
+		// Default
+		enabledPathTypes = map[pb.PathType]bool{
+			pb.PathType_UNKNOWN: true,
+			pb.PathType_DEPOT:   true,
+			pb.PathType_STORE:   true,
+			pb.PathType_CORE:    true,
+		}
+	}
+
+	cmd, stdout, stderr, err := launchNix(flag.Arg(0))
+	if err != nil {
+		panic(fmt.Errorf("could not launch nix: %w", err))
+	}
+	results, derivPath, err := consumeOutput(stdout, stderr)
+	if err != nil {
+		err2 := cmd.Wait()
+		if err2 != nil {
+			panic(fmt.Errorf("nix-instantiate failed: %w\nadditionally, while reading output: %v", err2, err))
+		}
+		panic(fmt.Errorf("problem reading nix output: %w", err))
+	}
+	err = cmd.Wait()
+	if err != nil {
+		panic(fmt.Errorf("nix-instantiate failed: %w", err))
+	}
+
+	_ = derivPath
+
+	if *modeFlag == "print" {
+		if enabledPathTypes[pb.PathType_STORE] {
+			for k := range results[nixStorePath] {
+				if *relativeFlag {
+					k = strings.TrimPrefix(k, *nixStoreRoot)
+					k = strings.TrimPrefix(k, "/")
+				}
+				fmt.Println(k)
+			}
+		}
+		if enabledPathTypes[pb.PathType_DEPOT] {
+			for k := range results[depotPath] {
+				if *relativeFlag {
+					k = strings.TrimPrefix(k, *depotRoot)
+					k = strings.TrimPrefix(k, "/")
+				}
+				fmt.Println(k)
+			}
+		}
+		if enabledPathTypes[pb.PathType_CORE] {
+			for k := range results[corePkgsPath] {
+				// TODO relativeFlag
+				fmt.Println(k)
+			}
+		}
+		if enabledPathTypes[pb.PathType_UNKNOWN] {
+			for k := range results[unknownPath] {
+				fmt.Println(k)
+			}
+		}
+	} else {
+		panic("unimplemented")
+	}
+}
+
+func envOr(envVar, def string) string {
+	v := os.Getenv(envVar)
+	if v == "" {
+		return def
+	}
+	return v
+}
+
+func checkDepotRoot() {
+	if *depotRoot == "" {
+		fmt.Fprintln(os.Stderr, "error: DEPOT_ROOT / -depot not set")
+		os.Exit(2)
+	}
+	_, err := os.Stat(*depotRoot)
+	if os.IsNotExist(err) {
+		fmt.Fprintf(os.Stderr, "error: %q does not exist\ndid you forget to set DEPOT_ROOT / -depot?\n", *depotRoot)
+		os.Exit(1)
+	} else if err != nil {
+		fmt.Fprintf(os.Stderr, "error: could not stat %q: %v\n", *depotRoot, err)
+		os.Exit(1)
+	}
+
+}
diff --git a/tools/emacs-pkgs/buildEmacsPackage.nix b/tools/emacs-pkgs/buildEmacsPackage.nix
new file mode 100644
index 000000000000..160c0626136d
--- /dev/null
+++ b/tools/emacs-pkgs/buildEmacsPackage.nix
@@ -0,0 +1,34 @@
+# Builder for depot-internal Emacs packages. Packages built using this
+# builder are added into the Emacs packages fixpoint under
+# `emacsPackages.tvlPackages`, which in turn makes it possible to use
+# them with special Emacs features like native compilation.
+#
+# Arguments passed to the builder are the same as
+# emacsPackages.trivialBuild, except:
+#
+# * packageRequires is not used
+#
+# * externalRequires takes a selection function for packages from
+#   emacsPackages
+#
+# * internalRequires takes other depot packages
+{ pkgs, ... }:
+
+buildArgs:
+
+pkgs.callPackage({ emacsPackages }:
+
+let
+  # Select external dependencies from the emacsPackages set
+  externalDeps = (buildArgs.externalRequires or (_: [])) emacsPackages;
+
+  # Override emacsPackages for depot-internal packages
+  internalDeps = map (p: p.override { inherit emacsPackages; })
+    (buildArgs.internalRequires or []);
+
+  trivialBuildArgs = builtins.removeAttrs buildArgs [
+    "externalRequires" "internalRequires"
+  ] // {
+    packageRequires = externalDeps ++ internalDeps;
+  };
+in emacsPackages.trivialBuild trivialBuildArgs) {}
diff --git a/tools/emacs-pkgs/defzone/defzone.el b/tools/emacs-pkgs/defzone/defzone.el
new file mode 100644
index 000000000000..ffd359e5ff83
--- /dev/null
+++ b/tools/emacs-pkgs/defzone/defzone.el
@@ -0,0 +1,60 @@
+;;; defzone.el --- Generate zone files from Elisp -*- lexical-binding: t; -*-
+
+(require 'dash)
+(require 'dash-functional)
+(require 's)
+
+(defun record-to-record (zone record &optional subdomain)
+  "Evaluate a record definition and turn it into a zone file
+  record in ZONE, optionally prefixed with SUBDOMAIN."
+
+  (cl-labels ((plist->alist (plist)
+                (when plist
+                  (cons
+                   (cons (car plist) (cadr plist))
+                   (plist->alist (cddr plist))))))
+    (let ((name (if subdomain (s-join "." (list subdomain zone)) zone)))
+      (pcase record
+        ;; SOA RDATA (RFC 1035; 3.3.13)
+        ((and `(SOA . (,ttl . ,keys))
+              (let (map (:mname mname) (:rname rname) (:serial serial)
+                        (:refresh refresh) (:retry retry) (:expire expire)
+                        (:minimum min))
+                (plist->alist keys)))
+         (if-let ((missing (-filter #'null (list mname rname serial
+                                                 refresh retry expire min))))
+             (error "Missing fields in SOA record: %s" missing)
+           (format "%s %s IN SOA %s %s %s %s %s %s %s"
+                   name ttl mname rname serial refresh retry expire min)))
+
+        (`(NS . (,ttl . 
,targets)) + (->> targets + (-map (lambda (target) (format "%s %s IN NS %s" name ttl target))) + (s-join "\n"))) + + (`(MX . (,ttl . ,pairs)) + (->> pairs + (-map (-lambda ((preference . exchange)) + (format "%s %s IN MX %s %s" name ttl preference exchange))) + (s-join "\n"))) + + (`(TXT ,ttl ,text) (format "%s %s IN TXT %s" name ttl (prin1-to-string text))) + + (`(A . (,ttl . ,ips)) + (->> ips + (-map (lambda (ip) (format "%s %s IN A %s" name ttl ip))) + (s-join "\n"))) + + (`(CNAME ,ttl ,target) (format "%s %s IN CNAME %s" name ttl target)) + + ((and `(,sub . ,records) + (guard (stringp sub))) + (s-join "\n" (-map (lambda (r) (record-to-record zone r sub)) records))) + + (_ (error "Invalid record definition: %s" record)))))) + +(defmacro defzone (fqdn &rest records) + "Generate zone file for the zone at FQDN from a simple DSL." + (declare (indent defun)) + + `(s-join "\n" (-map (lambda (r) (record-to-record ,fqdn r)) (quote ,records)))) diff --git a/tools/emacs-pkgs/defzone/example.el b/tools/emacs-pkgs/defzone/example.el new file mode 100644 index 000000000000..e9c86d25eec8 --- /dev/null +++ b/tools/emacs-pkgs/defzone/example.el @@ -0,0 +1,45 @@ +;;; example.el - usage example for defzone macro + +(defzone "tazj.in." + (SOA 21600 + :mname "ns-cloud-a1.googledomains.com." + :rname "cloud-dns-hostmaster.google.com." + :serial 123 + :refresh 21600 + :retry 3600 + :expire 1209600 + :minimum 300) + + (NS 21600 + "ns-cloud-a1.googledomains.com." + "ns-cloud-a2.googledomains.com." + "ns-cloud-a3.googledomains.com." + "ns-cloud-a4.googledomains.com.") + + (MX 300 + (1 . "aspmx.l.google.com.") + (5 . "alt1.aspmx.l.google.com.") + (5 . "alt2.aspmx.l.google.com.") + (10 . "alt3.aspmx.l.google.com.") + (10 . "alt4.aspmx.l.google.com.")) + + (TXT 3600 "google-site-verification=d3_MI1OwD6q2OT42Vvh0I9w2u3Q5KFBu-PieNUE1Fig") + + (A 300 "34.98.120.189") + + ;; Nested record sets are indicated by a list that starts with a + ;; string (this is just joined, so you can nest multiple levels at + ;; once) + ("blog" + ;; Blog "storage engine" is in a separate DNS zone + (NS 21600 + "ns-cloud-c1.googledomains.com." + "ns-cloud-c2.googledomains.com." + "ns-cloud-c3.googledomains.com." + "ns-cloud-c4.googledomains.com.")) + + ("git" + (A 300 "34.98.120.189") + (TXT 300 "<3 edef")) + + ("files" (CNAME 300 "c.storage.googleapis.com."))) diff --git a/tools/emacs-pkgs/dottime/default.nix b/tools/emacs-pkgs/dottime/default.nix new file mode 100644 index 000000000000..b819e9c14d2c --- /dev/null +++ b/tools/emacs-pkgs/dottime/default.nix @@ -0,0 +1,7 @@ +{ depot, ... }: + +depot.tools.emacs-pkgs.buildEmacsPackage { + pname = "dottime"; + version = "1.0"; + src = ./dottime.el; +} diff --git a/tools/emacs-pkgs/dottime/dottime.el b/tools/emacs-pkgs/dottime/dottime.el new file mode 100644 index 000000000000..2446f6488f32 --- /dev/null +++ b/tools/emacs-pkgs/dottime/dottime.el @@ -0,0 +1,81 @@ +;;; dottime.el --- use dottime in the modeline +;; +;; Copyright (C) 2019 Google Inc. +;; +;; Author: Vincent Ambo <tazjin@google.com> +;; Version: 1.0 +;; Package-Requires: (cl-lib) +;; +;;; Commentary: +;; +;; This package changes the display of time in the modeline to use +;; dottime (see https://dotti.me/) instead of the standard time +;; display. +;; +;; Modeline dottime display is enabled by calling +;; `dottime-display-mode' and dottime can be used in Lisp code via +;; `dottime-format'. 
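+;;
+;; For example (illustrative; dottime renders the UTC time plus a
+;; numeric offset marker, and the format contains no year):
+;;
+;;   (dottime-format nil 0)            ;; => "08-23T07·15"
+;;   (dottime-format nil 7200 "Sent ") ;; => "Sent 08-23T07·15+02"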
+ +(require 'cl-lib) +(require 'time) + +(defun dottime--format-string (&optional offset prefix) + "Creates the dottime format string for `format-time-string' + based on the local timezone." + + (let* ((offset-sec (or offset (car (current-time-zone)))) + (offset-hours (/ offset-sec 60 60)) + (base (concat prefix "%m-%dT%H·%M"))) + (if (/= offset-hours 0) + (concat base (format "%0+3d" offset-hours)) + base))) + +(defun dottime--display-time-update-advice (orig) + "Function used as advice to `display-time-update' with a + rebound definition of `format-time-string' that renders all + timestamps as dottime." + + (cl-letf* ((format-orig (symbol-function 'format-time-string)) + ((symbol-function 'format-time-string) + (lambda (&rest _) + (funcall format-orig (dottime--format-string) nil t)))) + (funcall orig))) + +(defun dottime-format (&optional time offset prefix) + "Format the given TIME in dottime at OFFSET. If TIME is nil, + the current time will be used. PREFIX is prefixed to the format + string verbatim. + + OFFSET can be an integer representing an offset in seconds, or + the argument can be elided in which case the system time zone + is used." + + (format-time-string (dottime--format-string offset prefix) time t)) + +(defun dottime-display-mode (arg) + "Enable time display as dottime. Disables dottime if called + with prefix 0 or nil." + + (interactive "p") + (if (or (eq arg 0) (eq arg nil)) + (advice-remove 'display-time-update #'dottime--display-time-update-advice) + (advice-add 'display-time-update :around #'dottime--display-time-update-advice)) + (display-time-update) + + ;; Amend the time display in telega.el to use dottime. + ;; + ;; This will never display offsets in the chat window, as those are + ;; always visible in the modeline anyways. + (when (featurep 'telega) + (defun telega-ins--dottime-advice (orig timestamp) + (let* ((dtime (decode-time timestamp t)) + (current-ts (time-to-seconds (current-time))) + (ctime (decode-time current-ts)) + (today00 (telega--time-at00 current-ts ctime))) + (if (> timestamp today00) + (telega-ins (format "%02d·%02d" (nth 2 dtime) (nth 1 dtime))) + (funcall orig timestamp)))) + + (advice-add 'telega-ins--date :around #'telega-ins--dottime-advice))) + +(provide 'dottime) diff --git a/tools/emacs-pkgs/nix-util/default.nix b/tools/emacs-pkgs/nix-util/default.nix new file mode 100644 index 000000000000..ffeb1cefade7 --- /dev/null +++ b/tools/emacs-pkgs/nix-util/default.nix @@ -0,0 +1,7 @@ +{ depot, ... }: + +depot.tools.emacs-pkgs.buildEmacsPackage { + pname = "nix-util"; + version = "1.0"; + src = ./nix-util.el; +} diff --git a/tools/emacs-pkgs/nix-util/nix-util.el b/tools/emacs-pkgs/nix-util/nix-util.el new file mode 100644 index 000000000000..4b9dd31a022e --- /dev/null +++ b/tools/emacs-pkgs/nix-util/nix-util.el @@ -0,0 +1,103 @@ +;;; nix-util.el --- Utilities for dealing with Nix code. -*- lexical-binding: t; -*- +;; +;; Copyright (C) 2019 Google Inc. +;; +;; Author: Vincent Ambo <tazjin@google.com> +;; Version: 1.0 +;; Package-Requires: (json map) +;; +;;; Commentary: +;; +;; This package adds some functionality that I find useful when +;; working in Nix buffers or programs installed from Nix. + +(require 'json) +(require 'map) + +(defvar nix-depot-path "/home/tazjin/depot") + +(defun nix/prefetch-github (owner repo) ; TODO(tazjin): support different branches + "Fetch the master branch of a GitHub repository and insert the + call to `fetchFromGitHub' at point." 
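+  ;;
+  ;; The inserted snippet has this shape (field values are illustrative
+  ;; and depend on what nix-prefetch-github reports):
+  ;;
+  ;;   fetchFromGitHub {
+  ;;     owner = "<owner>";
+  ;;     repo = "<repo>";
+  ;;     rev = "<rev>";
+  ;;     sha256 = "<sha256>";
+  ;;   };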
+
+  (interactive "sOwner: \nsRepository: ")
+
+  (let* (;; Keep these vars around for output insertion
+         (point (point))
+         (buffer (current-buffer))
+         (name (concat "github-fetcher/" owner "/" repo))
+         (outbuf (format "*%s*" name))
+         (errbuf (get-buffer-create "*github-fetcher/errors*"))
+         (cleanup (lambda ()
+                    (kill-buffer outbuf)
+                    (kill-buffer errbuf)
+                    (with-current-buffer buffer
+                      (read-only-mode -1))))
+         (prefetch-handler
+          (lambda (_process event)
+            (unwind-protect
+                (pcase event
+                  ("finished\n"
+                   (let* ((json-string (with-current-buffer outbuf
+                                         (buffer-string)))
+                          (result (json-read-from-string json-string)))
+                     (with-current-buffer buffer
+                       (goto-char point)
+                       (map-let (("rev" rev) ("sha256" sha256)) result
+                         (read-only-mode -1)
+                         (insert (format "fetchFromGitHub {
+  owner = \"%s\";
+  repo = \"%s\";
+  rev = \"%s\";
+  sha256 = \"%s\";
+};" owner repo rev sha256))
+                         (indent-region point (point))))))
+                  (_ (with-current-buffer errbuf
+                       (error "Failed to prefetch %s/%s: %s"
+                              owner repo (buffer-string)))))
+              (funcall cleanup)))))
+
+    ;; Fetching happens asynchronously, but we'd like to make sure the
+    ;; point stays in place while that happens.
+    (read-only-mode)
+    (make-process :name name
+                  :buffer outbuf
+                  :command `("nix-prefetch-github" ,owner ,repo)
+                  :stderr errbuf
+                  :sentinel prefetch-handler)))
+
+(defun nix/sly-from-depot (attribute)
+  "Start a Sly REPL configured with a Lisp matching a derivation
+  from my depot.
+
+  The derivation invokes nix.buildLisp.sbclWith and is built
+  asynchronously. The build output is included in the error
+  thrown on build failures."
+
+  (interactive "sAttribute: ")
+  (let* ((outbuf (get-buffer-create (format "*depot-out/%s*" attribute)))
+         (errbuf (get-buffer-create (format "*depot-errors/%s*" attribute)))
+         (expression (format "let depot = import <depot> {}; in depot.nix.buildLisp.sbclWith [ depot.%s ]" attribute))
+         ;; TODO(tazjin): use <depot>
+         (command (list "nix-build" "--no-out-link" "-I" (format "depot=%s" nix-depot-path) "-E" expression)))
+
+    (message "Acquiring Lisp for <depot>.%s" attribute)
+    (make-process :name (format "depot-nix-build/%s" attribute)
+                  :buffer outbuf
+                  :stderr errbuf
+                  :command command
+                  :sentinel
+                  (lambda (process event)
+                    (unwind-protect
+                        (pcase event
+                          ("finished\n"
+                           (let* ((outpath (s-trim (with-current-buffer outbuf (buffer-string))))
+                                  (lisp-path (s-concat outpath "/bin/sbcl")))
+                             (message "Acquired Lisp for <depot>.%s at %s" attribute lisp-path)
+                             (sly lisp-path)))
+                          (_ (with-current-buffer errbuf
+                               (error "Failed to build '%s':\n%s" attribute (buffer-string)))))
+                      (kill-buffer outbuf)
+                      (kill-buffer errbuf))))))
+
+(provide 'nix-util)
diff --git a/tools/emacs-pkgs/notable/OWNERS b/tools/emacs-pkgs/notable/OWNERS
new file mode 100644
index 000000000000..f7da62ecf709
--- /dev/null
+++ b/tools/emacs-pkgs/notable/OWNERS
@@ -0,0 +1,2 @@
+owners:
+  - tazjin
diff --git a/tools/emacs-pkgs/notable/default.nix b/tools/emacs-pkgs/notable/default.nix
new file mode 100644
index 000000000000..8c6935fe886b
--- /dev/null
+++ b/tools/emacs-pkgs/notable/default.nix
@@ -0,0 +1,15 @@
+{ depot, ...
+}:
+
+depot.tools.emacs-pkgs.buildEmacsPackage rec {
+  pname = "notable";
+  version = "1.0";
+  src = ./notable.el;
+
+  externalRequires = epkgs: with epkgs; [
+    f ht s
+  ];
+
+  internalRequires = [
+    depot.tools.emacs-pkgs.dottime
+  ];
+}
diff --git a/tools/emacs-pkgs/notable/notable.el b/tools/emacs-pkgs/notable/notable.el
new file mode 100644
index 000000000000..4668dd333c99
--- /dev/null
+++ b/tools/emacs-pkgs/notable/notable.el
@@ -0,0 +1,251 @@
+;;; notable.el --- a simple note-taking app -*- lexical-binding: t; -*-
+;;
+;; Copyright (C) 2020 The TVL Contributors
+;;
+;; Author: Vincent Ambo <mail@tazj.in>
+;; Version: 1.0
+;; Package-Requires: (cl-lib dash f rx s subr-x)
+;;
+;;; Commentary:
+;;
+;; This package provides a simple note-taking application which can be
+;; invoked from anywhere in Emacs, with several interactive
+;; note-taking functions included.
+;;
+;; As is tradition for my software, the idea here is to reduce
+;; friction which I see even with tools like `org-capture', because
+;; `org-mode' does a ton of things I don't care about.
+;;
+;; Notable stores its notes in simple JSON files in the folder
+;; specified by `notable-note-dir'.
+
+(require 'cl-lib)
+(require 'dottime)
+(require 'f)
+(require 'ht)
+(require 'rx)
+(require 's)
+(require 'subr-x)
+
+;; User-facing customisation options
+
+(defgroup notable nil
+  "Simple note-taking application."
+  :group 'applications)
+
+;; TODO(tazjin): Use whatever the XDG state dir thing is for these by
+;; default.
+(defcustom notable-note-dir (expand-file-name "~/.notable/")
+  "File path to the directory containing notable's notes."
+  :type 'string
+  :group 'notable)
+
+;; Package internal definitions
+
+(cl-defstruct (notable--note (:constructor notable--make-note))
+  "Structure containing the fields of a single notable note."
+  time    ;; UNIX timestamp at which the note was taken
+  content ;; Textual content of the note
+  )
+
+(defvar notable--note-lock (make-mutex "notable-notes")
+  "Exclusive lock for note operations with shared state.")
+
+(defvar notable--note-regexp
+  (rx "note-"
+      (group (one-or-more (any num)))
+      ".json")
+  "Regular expression to match note file names.")
+
+(defvar notable--next-note
+  (let ((next 0))
+    (dolist (file (f-entries notable-note-dir))
+      (when-let* ((match (string-match notable--note-regexp file))
+                  (id (string-to-number
+                       (match-string 1 file)))
+                  (larger (> id next)))
+        (setq next id)))
+    (+ 1 next))
+  "Next ID to use for notes. Initial value is determined based on
+  the existing notes files.")
+
+(defun notable--serialize-note (note)
+  "Serialise NOTE into JSON format."
+  (cl-check-type note notable--note)
+  (json-serialize (ht ("time" (notable--note-time note))
+                      ("content" (notable--note-content note)))))
+
+(defun notable--deserialize-note (json)
+  "Deserialise JSON into a notable note."
+  (cl-check-type json string)
+  (let ((parsed (json-parse-string json)))
+    (unless (and (ht-contains? parsed "time")
+                 (ht-contains? parsed "content"))
+      (error "Missing required keys in note structure!"))
+    (notable--make-note :time (ht-get parsed "time")
+                        :content (ht-get parsed "content"))))
+
+(defun notable--next-id ()
+  "Return the next note ID and increment the counter."
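+  ;; Taking the note lock means concurrent callers (e.g. capture
+  ;; commands racing from timers) can never be handed the same ID.
+  ;; Note that `with-mutex' requires an Emacs built with thread support.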
+  (with-mutex notable--note-lock
+    (let ((id notable--next-note))
+      (setq notable--next-note (+ 1 id))
+      id)))
+
+(defun notable--note-path (id)
+  (cl-check-type id integer)
+  (f-join notable-note-dir (format "note-%d.json" id)))
+
+(defun notable--archive-path (id)
+  (cl-check-type id integer)
+  (f-join notable-note-dir (format "archive-%d.json" id)))
+
+(defun notable--add-note (content)
+  "Add a note with CONTENT to the note store."
+  (let* ((id (notable--next-id))
+         (note (notable--make-note :time (time-convert nil 'integer)
+                                   :content content))
+         (path (notable--note-path id)))
+    (when (f-exists? path) (error "Note file '%s' already exists!" path))
+    (f-write-text (notable--serialize-note note) 'utf-8 path)
+    (message "Saved note %d" id)))
+
+(defun notable--archive-note (id)
+  "Archive the note with ID."
+  (cl-check-type id integer)
+
+  (unless (f-exists? (notable--note-path id))
+    (error "There is no note with ID %d." id))
+
+  (when (f-exists? (notable--archive-path id))
+    (error "Oh no, a note with ID %d has already been archived!" id))
+
+  (f-move (notable--note-path id) (notable--archive-path id))
+  (message "Archived note with ID %d." id))
+
+(defun notable--list-note-ids ()
+  "List all note IDs (not contents) from `notable-note-dir'"
+  (cl-loop for file in (f-entries notable-note-dir)
+           with res = nil
+           if (string-match notable--note-regexp file)
+           do (push (string-to-number (match-string 1 file)) res)
+           finally return res))
+
+(defun notable--get-note (id)
+  (let ((path (notable--note-path id)))
+    (unless (f-exists? path)
+      (error "No note with ID %s in note storage!" id))
+    (notable--deserialize-note (f-read-text path 'utf-8))))
+
+;; Note view buffer implementation
+
+(defvar-local notable--buffer-note nil "The note ID displayed by this buffer.")
+
+(define-derived-mode notable-note-mode fundamental-mode "notable-note"
+  "Major mode displaying a single Notable note."
+  (set (make-local-variable 'scroll-preserve-screen-position) t)
+  (setq truncate-lines t)
+  (setq buffer-read-only t)
+  (setq buffer-undo-list t))
+
+(setq notable-note-mode-map
+      (let ((map (make-sparse-keymap)))
+        (define-key map "q" 'kill-current-buffer)
+        map))
+
+(defun notable--show-note (id)
+  "Display a single note in a separate buffer."
+  (cl-check-type id integer)
+
+  (let ((note (notable--get-note id))
+        (buffer (get-buffer-create (format "*notable: %d*" id)))
+        (inhibit-read-only t))
+    (with-current-buffer buffer
+      (notable-note-mode)
+      (erase-buffer)
+      (setq notable--buffer-note id)
+      (setq header-line-format
+            (format "Note from %s"
+                    (dottime-format
+                     (seconds-to-time (notable--note-time note))))))
+    (switch-to-buffer buffer)
+    (goto-char (point-min))
+    (insert (notable--note-content note))))
+
+(defun notable--show-note-at-point ()
+  (interactive)
+  (notable--show-note (get-text-property (point) 'notable-note-id)))
+
+(defun notable--archive-note-at-point ()
+  (interactive)
+  (notable--archive-note (get-text-property (point) 'notable-note-id)))
+
+;; Note list buffer implementation
+
+(define-derived-mode notable-list-mode fundamental-mode "notable"
+  "Major mode displaying the Notable note list."
+  ;; TODO(tazjin): `imenu' functions?
+
+  (set (make-local-variable 'scroll-preserve-screen-position) t)
+  (setq truncate-lines t)
+  (setq buffer-read-only t)
+  (setq buffer-undo-list t)
+  (hl-line-mode t))
+
+(setq notable-list-mode-map
+      (let ((map (make-sparse-keymap)))
+        (define-key map "a" 'notable--archive-note-at-point)
+        (define-key map "q" 'kill-current-buffer)
+        (define-key map "g" 'notable-list-notes)
+        (define-key map (kbd "RET") 'notable--show-note-at-point)
+        map))
+
+(defun notable--render-note (id note)
+  (cl-check-type id integer)
+  (cl-check-type note notable--note)
+
+  (let* ((start (point))
+         (date (dottime-format (seconds-to-time
+                                (notable--note-time note))))
+         (first-line (truncate-string-to-width
+                      (car (s-lines (notable--note-content note)))
+                      ;; Length of the window, minus the date prefix:
+                      (- (window-width) (+ 2 (length date)))
+                      nil nil 1)))
+    (insert (propertize (s-concat date " " first-line)
+                        'notable-note-id id))
+    (insert "\n")))
+
+(defun notable--render-notes (notes)
+  "Retrieve each note in NOTES by ID and insert its contents into
+the list buffer.
+
+For larger notes only the first line is displayed."
+  (dolist (id notes)
+    (notable--render-note id (notable--get-note id))))
+
+;; User-facing functions
+
+(defun notable-take-note (content)
+  "Interactively prompt the user for a note that should be stored
+in Notable."
+  (interactive "sEnter note: ")
+  (cl-check-type content string)
+  (notable--add-note content))
+
+(defun notable-list-notes ()
+  "Open a buffer listing all active notes."
+  (interactive)
+
+  (let ((buffer (get-buffer-create "*notable*"))
+        (notes (notable--list-note-ids))
+        (inhibit-read-only t))
+    (with-current-buffer buffer
+      (notable-list-mode)
+      (erase-buffer)
+      (setq header-line-format "Notable notes"))
+    (switch-to-buffer buffer)
+    (goto-char (point-min))
+    (notable--render-notes notes)))
+
+(provide 'notable)
diff --git a/tools/emacs-pkgs/passively/OWNERS b/tools/emacs-pkgs/passively/OWNERS
new file mode 100644
index 000000000000..56853aed59e7
--- /dev/null
+++ b/tools/emacs-pkgs/passively/OWNERS
@@ -0,0 +1,3 @@
+inherited: true
+owners:
+  - tazjin
diff --git a/tools/emacs-pkgs/passively/README.md b/tools/emacs-pkgs/passively/README.md
new file mode 100644
index 000000000000..052c496b324d
--- /dev/null
+++ b/tools/emacs-pkgs/passively/README.md
@@ -0,0 +1,76 @@
+<!-- SPDX-License-Identifier: MIT -->
+passively
+=========
+
+Passively is an Emacs Lisp library for passively learning new
+information in an Emacs instance.
+
+Passively works by displaying a random piece of information to be
+learned in the Emacs echo area whenever Emacs is idle for a set amount
+of time.
+
+It was designed to aid in language acquisition by passively displaying
+new vocabulary to learn.
+
+Passively is configured with a corpus of information (a hash table
+mapping string keys to string values) and maintains a set of terms
+that the user already learned in a file on disk.
+
+## Configuration & usage
+
+Configure passively like this:
+
+```lisp
+;; Configure the terms to learn. Each term should have a key and a
+;; string value which is displayed.
+(setq passively-learn-terms
+      (ht ("забыть" "забыть - to forget")
+          ("действительно" "действительно - indeed, really")))
+
+;; Configure a file in which passively should store its state
+;; (defaults to $user-emacs-directory/passively.el)
+(setq passively-store-state "/persist/tazjin/passively.el")
+
+;; Configure after how many seconds of idle time passively should
+;; display a new piece of information.
+;; (defaults to 4 seconds) +(setq passively-show-after-idle-for 5) + +;; Once this configuration has been set up, start passively: +(passively-enable) + +;; Or, if it annoys you, disable it again: +(passively-disable) +``` + +These variables are registered with `customize` and may be customised +through its interface. + +### Known terms + +Passively exposes the interactive function +`passively-mark-last-as-known` which marks the previously displayed +term as known. This means that it will not be included in the random +selection anymore. + +### Last term + +Passively stores the key of the last known term in +`passively-last-displayed`. + +## Installation + +Inside of the TVL depot, you can install passively from +`pkgs.emacsPackages.tvlPackages.passively`. Outside of the depot, you +can clone passively like this: + + git clone https://code.tvl.fyi/depot.git:/tools/emacs-pkgs/passively.git + +Passively depends on `ht.el`. + +Feel free to contribute patches by emailing them to `depot@tazj.in` + +## Use-cases + +I'm using passively to learn Russian vocabulary. Once I've cleaned up +my configuration for that, my Russian term list will be linked here. diff --git a/tools/emacs-pkgs/passively/default.nix b/tools/emacs-pkgs/passively/default.nix new file mode 100644 index 000000000000..ec59cc85fd8f --- /dev/null +++ b/tools/emacs-pkgs/passively/default.nix @@ -0,0 +1,8 @@ +{ depot, ... }: + +depot.tools.emacs-pkgs.buildEmacsPackage { + pname = "passively"; + version = "1.0"; + src = ./passively.el; + externalRequires = (epkgs: with epkgs; [ ht ]); +} diff --git a/tools/emacs-pkgs/passively/passively.el b/tools/emacs-pkgs/passively/passively.el new file mode 100644 index 000000000000..0d871f26add6 --- /dev/null +++ b/tools/emacs-pkgs/passively/passively.el @@ -0,0 +1,121 @@ +;;; passively.el --- Passively learn new information -*- lexical-binding: t; -*- +;; +;; SPDX-License-Identifier: MIT +;; Copyright (C) 2020 The TVL Contributors +;; +;; Author: Vincent Ambo <tazjin@tvl.su> +;; Version: 1.0 +;; Package-Requires: (ht seq) +;; URL: https://code.tvl.fyi/about/tools/emacs-pkgs/passively/ +;; +;; This file is not part of GNU Emacs. + +(require 'ht) +(require 'seq) + +;; Customisation options + +(defgroup passively nil + "Customisation options for passively" + :group 'applications) + +(defcustom passively-learn-terms nil + "Terms that passively should randomly display to the user. The +format of this variable is a hash table with a string key that +uniquely identifies the term, and a string value that is +displayed to the user. + +For example, a possible value could be: + + (ht (\"забыть\" \"забыть - to forget\") + (\"действительно\" \"действительно - indeed, really\"))) +" + ;; TODO(tazjin): No hash-table type in customization.el? + :type '(sexp) + :group 'passively) + +(defcustom passively-store-state (format "%spassively.el" user-emacs-directory) + "File in which passively should store its state (e.g. known terms)" + :type '(file) + :group 'passively) + +(defcustom passively-show-after-idle-for 4 + "Number of seconds after Emacs goes idle that passively should +wait before displaying a term." + :type '(integer) + :group 'passively) + +;; Implementation of state persistence +(defvar passively-last-displayed nil + "Key of the last displayed passively term.") + +(defvar passively--known-terms (make-hash-table) + "Set of terms that are already known.") + +(defun passively--persist-known-terms () + "Persist the set of known passively terms to disk." 
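+  ;; The on-disk format is simply the printed list of known term keys,
+  ;; e.g. (illustrative): ("забыть" "действительно")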
+ (with-temp-file passively-store-state + (insert (prin1-to-string (ht-keys passively--known-terms))))) + +(defun passively--load-known-terms () + "Load the set of known passively terms from disk." + (with-temp-buffer + (insert-file-contents passively-store-state) + (let ((keys (read (current-buffer)))) + (setq passively--known-terms (make-hash-table)) + (seq-do + (lambda (key) (ht-set passively--known-terms key t)) + keys))) + (message "passively: loaded %d known words" + (seq-length (ht-keys passively--known-terms)))) + +(defun passively-mark-last-as-known () + "Mark the last term that passively displayed as known. It will +not be displayed again." + (interactive) + + (ht-set passively--known-terms passively-last-displayed t) + (passively--persist-known-terms) + (message "passively: Marked '%s' as known" passively-last-displayed)) + +;; Implementation of main display logic +(defvar passively--display-timer nil + "idle-timer used for displaying terms by passively") + +(defun passively--random-term (timeout) + ;; This is stupid, calculate set intersections instead. + (if (< 1000 timeout) + (error "It seems you already know all the terms?") + (seq-random-elt (ht-keys passively-learn-terms)))) + +(defun passively--display-random-term () + (let* ((timeout 1) + (term (passively--random-term timeout))) + (while (ht-contains? passively--known-terms term) + (setq timeout (+ 1 timeout)) + (setq term (passively--random-term timeout))) + (setq passively-last-displayed term) + (message (ht-get passively-learn-terms term)))) + +(defun passively-enable () + "Enable automatic display of terms via passively." + (interactive) + (if passively--display-timer + (error "passively: Already running!") + (passively--load-known-terms) + (setq passively--display-timer + (run-with-idle-timer passively-show-after-idle-for t + #'passively--display-random-term)) + (message "passively: Now running after %s seconds of idle time" + passively-show-after-idle-for))) + +(defun passively-disable () + "Turn off automatic display of terms via passively." + (interactive) + (unless passively--display-timer + (error "passively: Not running!")) + (cancel-timer passively--display-timer) + (setq passively--display-timer nil) + (message "passively: Now disabled")) + +(provide 'passively) diff --git a/tools/emacs-pkgs/term-switcher/default.nix b/tools/emacs-pkgs/term-switcher/default.nix new file mode 100644 index 000000000000..e775de5cdbe8 --- /dev/null +++ b/tools/emacs-pkgs/term-switcher/default.nix @@ -0,0 +1,8 @@ +{ depot, ... }: + +depot.tools.emacs-pkgs.buildEmacsPackage { + pname = "term-switcher"; + version = "1.0"; + src = ./term-switcher.el; + externalRequires = epkgs: with epkgs; [ dash ivy s vterm ]; +} diff --git a/tools/emacs-pkgs/term-switcher/term-switcher.el b/tools/emacs-pkgs/term-switcher/term-switcher.el new file mode 100644 index 000000000000..0055f87fd67f --- /dev/null +++ b/tools/emacs-pkgs/term-switcher/term-switcher.el @@ -0,0 +1,57 @@ +;;; term-switcher.el --- Easily switch between open vterms +;; +;; Copyright (C) 2019 Google Inc. +;; +;; Author: Vincent Ambo <tazjin@google.com> +;; Version: 1.1 +;; Package-Requires: (dash ivy s vterm) +;; +;;; Commentary: +;; +;; This package adds a function that lets users quickly switch between +;; different open vterms via ivy. + +(require 'dash) +(require 'ivy) +(require 's) +(require 'vterm) + +(defgroup term-switcher nil + "Customization options `term-switcher'.") + +(defcustom term-switcher-buffer-prefix "vterm<" + "String prefix for vterm terminal buffers. 
For example, if you + set your titles to match `vterm<...>' a useful prefix might be + `vterm<'." + :type '(string) + :group 'term-switcher) + +(defun ts/open-or-create-vterm (buffer-name) + "Switch to the buffer with BUFFER-NAME or create a new vterm + buffer." + (if (equal "New vterm" buffer-name) + (vterm) + (if-let ((buffer (get-buffer buffer-name))) + (switch-to-buffer buffer) + (error "Could not find vterm buffer: %s" buffer-name)))) + +(defun ts/is-vterm-buffer (buffer) + "Determine whether BUFFER runs a vterm." + (equal 'vterm-mode (buffer-local-value 'major-mode buffer))) + +(defun ts/switch-to-terminal () + "Switch to an existing vterm buffer or create a new one." + + (interactive) + (let ((terms (-map #'buffer-name + (-filter #'ts/is-vterm-buffer (buffer-list))))) + (if terms + (ivy-read "Switch to vterm: " + (cons "New vterm" terms) + :caller 'ts/switch-to-terminal + :preselect (s-concat "^" term-switcher-buffer-prefix) + :require-match t + :action #'ts/open-or-create-vterm) + (vterm)))) + +(provide 'term-switcher) diff --git a/tools/emacs-pkgs/tvl/OWNERS b/tools/emacs-pkgs/tvl/OWNERS new file mode 100644 index 000000000000..ce7e0e37ee4f --- /dev/null +++ b/tools/emacs-pkgs/tvl/OWNERS @@ -0,0 +1,3 @@ +inherited: true +owners: + - grfn diff --git a/tools/emacs-pkgs/tvl/default.nix b/tools/emacs-pkgs/tvl/default.nix new file mode 100644 index 000000000000..5dcc184bb521 --- /dev/null +++ b/tools/emacs-pkgs/tvl/default.nix @@ -0,0 +1,8 @@ +{ depot, ... }: + +depot.tools.emacs-pkgs.buildEmacsPackage { + pname = "tvl"; + version = "1.0"; + src = ./tvl.el; + externalRequires = (epkgs: with epkgs; [ magit s ]); +} diff --git a/tools/emacs-pkgs/tvl/tvl.el b/tools/emacs-pkgs/tvl/tvl.el new file mode 100644 index 000000000000..6888de3571b6 --- /dev/null +++ b/tools/emacs-pkgs/tvl/tvl.el @@ -0,0 +1,188 @@ +;;; tvl.el --- description -*- lexical-binding: t; -*- +;; +;; Copyright (C) 2020 Griffin Smith +;; Copyright (C) 2020 The TVL Contributors +;; +;; Author: Griffin Smith <grfn@gws.fyi> +;; Version: 0.0.1 +;; Package-Requires: (s dash magit) +;; +;; This file is not part of GNU Emacs. +;; +;;; Commentary: +;; +;; This file provides shared utilities for interacting with the TVL monorepo +;; +;;; Code: + +(require 'magit) +(require 's) + +(defgroup tvl nil + "Customisation options for TVL functionality.") + +(defcustom tvl-gerrit-remote "origin" + "Name of the git remote for gerrit" + :type '(string) + :group 'tvl) + +(defcustom tvl-depot-path "/depot" + "Location at which the TVL depot is checked out." + :type '(string) + :group 'tvl) + +(defcustom tvl-target-branch "canon" + "Branch to use to target CLs" + :group 'tvl + :type '(string) + :safe (lambda (_) t)) + +(defun tvl--gerrit-ref (target-branch &optional flags) + (let ((flag-suffix (if flags (format "%%%s" (s-join "," flags)) + ""))) + (format "HEAD:refs/for/%s%s" target-branch flag-suffix))) + +(transient-define-suffix magit-gerrit-push-for-review () + "Push to Gerrit for review." + (interactive) + (magit-push-refspecs tvl-gerrit-remote + (tvl--gerrit-ref tvl-target-branch) + nil)) + +(transient-append-suffix + #'magit-push ["r"] + (list "R" "push to Gerrit for review" #'magit-gerrit-push-for-review)) + +(transient-define-suffix magit-gerrit-push-wip () + "Push to Gerrit as a work-in-progress." 
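+  ;; The "%wip" suffix is a Gerrit push option that marks the newly
+  ;; uploaded patchset as work-in-progress on the server side.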
+ (interactive) + (magit-push-refspecs tvl-gerrit-remote + (concat (tvl--gerrit-ref tvl-target-branch) "%wip") + nil)) + +(transient-append-suffix + #'magit-push ["r"] + (list "W" "push to Gerrit as a work-in-progress" #'magit-gerrit-push-wip)) + +(transient-define-suffix magit-gerrit-push-autosubmit () + "Push to Gerrit with autosubmit enabled." + (interactive) + (magit-push-refspecs tvl-gerrit-remote + (tvl--gerrit-ref tvl-target-branch '("l=Autosubmit+1")) + nil)) + +(transient-append-suffix + #'magit-push ["r"] + (list "A" "push to Gerrit with autosubmit enabled" #'magit-gerrit-push-autosubmit)) + +(transient-define-suffix magit-gerrit-submit () + "Push to Gerrit for review." + (interactive) + (magit-push-refspecs tvl-gerrit-remote + (tvl--gerrit-ref tvl-target-branch '("submit")) + nil)) + +(transient-append-suffix + #'magit-push ["r"] + (list "S" "push to Gerrit to submit" #'magit-gerrit-submit)) + + +(transient-define-suffix magit-gerrit-rubberstamp () + "Push, approve and autosubmit to Gerrit. CLs created via this +rubberstamp method will automatically be submitted after CI +passes. This is potentially dangerous, use with care." + (interactive) + (magit-push-refspecs tvl-gerrit-remote + (tvl--gerrit-ref tvl-target-branch + '("l=Code-Review+2" + "l=Autosubmit+1" + "publish-comments")) + nil)) + +(transient-append-suffix + #'magit-push ["r"] + (list "P" "push & rubberstamp to Gerrit" #'magit-gerrit-rubberstamp)) + +(defvar magit-cl-history nil) +(defun magit-read-cl (remote) + (let* ((refs (prog2 (message "Determining available refs...") + (magit-remote-list-refs remote) + (message "Determining available refs...done"))) + (change-refs (-filter + (apply-partially #'string-prefix-p "refs/changes/") + refs)) + (cl-number-to-refs + (-group-by + (lambda (change-ref) + ;; refs/changes/34/1234/1 + ;; ^ ^ ^ ^ ^ + ;; 1 2 3 4 5 + ;; ^-- this one + (cadddr + (split-string change-ref (rx "/")))) + change-refs)) + (cl-numbers + (-map + (lambda (cl-to-refs) + (let ((latest-patchset-ref + (-max-by + (-on #'> (lambda (ref) + (string-to-number + (fifth (split-string ref (rx "/")))))) + (-remove + (apply-partially #'s-ends-with-p "meta") + (cdr cl-to-refs))))) + (propertize (car cl-to-refs) 'ref latest-patchset-ref))) + cl-number-to-refs))) + (get-text-property + 0 + 'ref + (magit-completing-read + "Checkout CL" cl-numbers nil t nil 'magit-cl-history)))) + +(transient-define-suffix magit-gerrit-checkout (remote cl-refspec) + "Prompt for a CL number and checkout the latest patchset of that CL with + detached HEAD" + (interactive + (let* ((remote tvl-gerrit-remote) + (cl (magit-read-cl remote))) + (list remote cl))) + (magit-fetch-refspec remote cl-refspec (magit-fetch-arguments)) + ;; That runs async, so wait for it to finish (this is how magit does it) + (while (and magit-this-process + (eq (process-status magit-this-process) 'run)) + (sleep-for 0.005)) + (magit-checkout "FETCH_HEAD" (magit-branch-arguments)) + (message "HEAD detached at %s" cl-refspec)) + + +(transient-append-suffix + #'magit-branch ["l"] + (list "g" "gerrit CL" #'magit-gerrit-checkout)) + +(transient-define-suffix magit-gerrit-cherry-pick (remote cl-refspec) + "Prompt for a CL number and cherry-pick the latest patchset of that CL" + (interactive + (let* ((remote tvl-gerrit-remote) + (cl (magit-read-cl remote))) + (list remote cl))) + (magit-fetch-refspec remote cl-refspec (magit-fetch-arguments)) + ;; That runs async, so wait for it to finish (this is how magit does it) + (while (and magit-this-process + (eq (process-status 
magit-this-process) 'run))
+    (sleep-for 0.005))
+  (magit-cherry-copy (list "FETCH_HEAD"))
+  (message "HEAD detached at %s" cl-refspec))
+
+
+(transient-append-suffix
+  #'magit-cherry-pick ["m"]
+  (list "g" "Gerrit CL" #'magit-gerrit-cherry-pick))
+
+(defun tvl-depot-status ()
+  "Open the TVL monorepo in magit."
+  (interactive)
+  (magit-status-setup-buffer tvl-depot-path))
+
+(provide 'tvl)
+;;; tvl.el ends here
diff --git a/tools/eprintf.nix b/tools/eprintf.nix
new file mode 100644
index 000000000000..eeacca4c8c72
--- /dev/null
+++ b/tools/eprintf.nix
@@ -0,0 +1,9 @@
+{ depot, pkgs, ... }:
+
+let
+  bins = depot.nix.getBins pkgs.coreutils [ "printf" ];
+
+# printf(1), but redirect to stderr
+in depot.nix.writeExecline "eprintf" {} [
+  "fdmove" "-c" "1" "2" bins.printf "$@"
+]
diff --git a/tools/gerrit-cli.nix b/tools/gerrit-cli.nix
new file mode 100644
index 000000000000..1606155a8068
--- /dev/null
+++ b/tools/gerrit-cli.nix
@@ -0,0 +1,13 @@
+# Utility script to run a gerrit command on the depot host via ssh.
+# Reads the username from TVL_USERNAME, or defaults to $(whoami)
+{ pkgs, ... }:
+
+pkgs.writeShellScriptBin "gerrit" ''
+  TVL_USERNAME=''${TVL_USERNAME:-$(whoami)}
+  if which ssh &>/dev/null; then
+    ssh=ssh
+  else
+    ssh="${pkgs.openssh}/bin/ssh"
+  fi
+  exec $ssh "$TVL_USERNAME@code.tvl.fyi" -p 29418 -- gerrit "$@"
+''
diff --git a/tools/gerrit-update.nix b/tools/gerrit-update.nix
new file mode 100644
index 000000000000..e4efd89ea597
--- /dev/null
+++ b/tools/gerrit-update.nix
@@ -0,0 +1,34 @@
+# Utility script to perform a Gerrit update.
+{ pkgs, ... }:
+
+pkgs.writeShellScriptBin "gerrit-update" ''
+  set -euo pipefail
+
+  if [[ $EUID -ne 0 ]]; then
+    echo "Oh no! Only root is allowed to update Gerrit!" >&2
+    exit 1
+  fi
+
+  gerrit_war="$(find "${pkgs.gerrit}/webapps" -name 'gerrit*.war')"
+  java="${pkgs.jdk}/bin/java"
+  backup_path="/root/gerrit_preupgrade-$(date +"%Y-%m-%d").tar.bz2"
+
+  # Take a safety backup of Gerrit into /root's homedir. Just in case.
+  echo "Backing up Gerrit to $backup_path"
+  tar -cjf "$backup_path" /var/lib/gerrit
+
+  # Stop Gerrit (and its activation socket).
+  echo "Stopping Gerrit"
+  systemctl stop gerrit.service gerrit.socket
+
+  # Ask Gerrit to do a schema upgrade...
+  echo "Performing schema upgrade"
+  "$java" -jar "$gerrit_war" \
+    init --no-auto-start --batch --skip-plugins --site-path "/var/lib/gerrit"
+
+  # Restart Gerrit.
+  echo "Restarting Gerrit"
+  systemctl start gerrit.socket gerrit.service
+
+  echo "...done"
+''
diff --git a/tools/hash-password.nix b/tools/hash-password.nix
new file mode 100644
index 000000000000..9893d521787e
--- /dev/null
+++ b/tools/hash-password.nix
@@ -0,0 +1,7 @@
+# Utility for invoking slappasswd with the correct options for
+# creating an ARGON2 password hash.
+{ pkgs, ...
}: + +pkgs.writeShellScriptBin "hash-password" '' + ${pkgs.openldap}/bin/slappasswd -o module-load=pw-argon2 -h '{ARGON2}' +'' diff --git a/tools/monzo_ynab/.envrc b/tools/monzo_ynab/.envrc deleted file mode 100644 index f368d0b7e813..000000000000 --- a/tools/monzo_ynab/.envrc +++ /dev/null @@ -1,8 +0,0 @@ -source_up -use_nix -export monzo_client_id="$(jq -j '.monzo | .clientId' < ~/briefcase/secrets.json)" -export monzo_client_secret="$(jq -j '.monzo | .clientSecret' < ~/briefcase/secrets.json)" -export ynab_personal_access_token="$(jq -j '.ynab | .personalAccessToken' < ~/briefcase/secrets.json)" -export ynab_account_id="$(jq -j '.ynab | .accountId' < ~/briefcase/secrets.json)" -export ynab_budget_id="$(jq -j '.ynab | .budgetId' < ~/briefcase/secrets.json)" -export store_path="$(pwd)" diff --git a/tools/monzo_ynab/.gitignore b/tools/monzo_ynab/.gitignore deleted file mode 100644 index e92078303bec..000000000000 --- a/tools/monzo_ynab/.gitignore +++ /dev/null @@ -1,3 +0,0 @@ -/ynab/fixture.json -/monzo/fixture.json -/kv.json diff --git a/tools/monzo_ynab/README.md b/tools/monzo_ynab/README.md deleted file mode 100644 index 4ccbb35d8c5d..000000000000 --- a/tools/monzo_ynab/README.md +++ /dev/null @@ -1,41 +0,0 @@ -# monzo_ynab - -Exporting Monzo transactions to my YouNeedABudget.com (i.e. YNAB) account. YNAB -unfortunately doesn't currently offer an Monzo integration. As a workaround and -a practical excuse to learn Go, I decided to write one myself. - -This job is going to run N times per 24 hours. Monzo offers webhooks for -reacting to certain types of events. I don't expect I'll need realtime data for -my YNAB integration. That may change, however, so it's worth noting. - -## Installation - -Like many other packages in this repository, `monzo_ynab` is packaged using -Nix. To install and use, you have two options: - -You can install using `nix-build` and then run the resulting -`./result/bin/monzo_ynab`. - -```shell -> nix-build . && ./result/bin/monzo_ynab -``` - -Or you can install using `nix-env` if you'd like to create the `monzo_ynab` -symlink. - -```shell -> nix-env -f ~/briefcase/monzo_ynab -i -``` - -## Deployment - -While this project is currently not deployed, my plan is to host it on Google -Cloud and run it as a Cloud Run application. What I don't yet know is whether or -not this is feasible or a good idea. One complication that I foresee is that the -OAuth 2.0 login flow requires a web browser until the access token and refresh -tokens are acquired. I'm unsure how to workaround this at the moment. - -For more information about the general packaging and deployment strategies I'm -currently using, refer to the [deployments][deploy] writeup. 
- -[deploy]: ../deploy/README.md diff --git a/tools/monzo_ynab/auth.go b/tools/monzo_ynab/auth.go deleted file mode 100644 index b66bacb10687..000000000000 --- a/tools/monzo_ynab/auth.go +++ /dev/null @@ -1,101 +0,0 @@ -package auth - -//////////////////////////////////////////////////////////////////////////////// -// Dependencies -//////////////////////////////////////////////////////////////////////////////// - -import ( - "encoding/json" - "fmt" - "log" - "net/http" - "net/url" - "os" - "os/exec" - "utils" -) - -//////////////////////////////////////////////////////////////////////////////// -// Constants -//////////////////////////////////////////////////////////////////////////////// - -var ( - BROWSER = os.Getenv("BROWSER") - REDIRECT_URI = "http://localhost:8080/authorization-code" -) - -//////////////////////////////////////////////////////////////////////////////// -// Types -//////////////////////////////////////////////////////////////////////////////// - -// This is the response returned from Monzo when we exchange our authorization -// code for an access token. While Monzo returns additional fields, I'm only -// interested in AccessToken and RefreshToken. -type accessTokenResponse struct { - AccessToken string `json:"access_token"` - RefreshToken string `json:"refresh_token"` - ExpiresIn int `json:"expires_in"` -} - -type Tokens struct { - AccessToken string - RefreshToken string - ExpiresIn int -} - -//////////////////////////////////////////////////////////////////////////////// -// Functions -//////////////////////////////////////////////////////////////////////////////// - -// Returns the access token and refresh tokens for the Monzo API. -func GetTokensFromAuthCode(authCode string, clientID string, clientSecret string) *Tokens { - res, err := http.PostForm("https://api.monzo.com/oauth2/token", url.Values{ - "grant_type": {"authorization_code"}, - "client_id": {clientID}, - "client_secret": {clientSecret}, - "redirect_uri": {REDIRECT_URI}, - "code": {authCode}, - }) - utils.FailOn(err) - defer res.Body.Close() - payload := &accessTokenResponse{} - json.NewDecoder(res.Body).Decode(payload) - - return &Tokens{payload.AccessToken, payload.RefreshToken, payload.ExpiresIn} -} - -// Open a web browser to allow the user to authorize this application. Return -// the authorization code sent from Monzo. -func GetAuthCode(clientID string) string { - // TODO(wpcarro): Consider generating a random string for the state when the - // application starts instead of hardcoding it here. - state := "xyz123" - url := fmt.Sprintf( - "https://auth.monzo.com/?client_id=%s&redirect_uri=%s&response_type=code&state=%s", - clientID, REDIRECT_URI, state) - exec.Command(BROWSER, url).Start() - - authCode := make(chan string) - go func() { - log.Fatal(http.ListenAndServe(":8080", - http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) { - // 1. Get authorization code from Monzo. - if req.URL.Path == "/authorization-code" { - params := req.URL.Query() - reqState := params["state"][0] - code := params["code"][0] - - if reqState != state { - log.Fatalf("Value for state returned by Monzo does not equal our state. 
%s != %s", reqState, state) - } - authCode <- code - - fmt.Fprintf(w, "Authorized!") - } else { - log.Printf("Unhandled request: %v\n", *req) - } - }))) - }() - result := <-authCode - return result -} diff --git a/tools/monzo_ynab/job.nix b/tools/monzo_ynab/job.nix deleted file mode 100644 index 1e10751012e2..000000000000 --- a/tools/monzo_ynab/job.nix +++ /dev/null @@ -1,12 +0,0 @@ -{ depot, briefcase, ... }: - -depot.buildGo.program { - name = "job"; - srcs = [ - ./main.go - ]; - deps = with briefcase.gopkgs; [ - kv - utils - ]; -} diff --git a/tools/monzo_ynab/main.go b/tools/monzo_ynab/main.go deleted file mode 100644 index 06f1944eab70..000000000000 --- a/tools/monzo_ynab/main.go +++ /dev/null @@ -1,43 +0,0 @@ -// Exporting Monzo transactions to my YouNeedABudget.com (i.e. YNAB) -// account. YNAB unfortunately doesn't currently offer an Monzo integration. As -// a workaround and a practical excuse to learn Go, I decided to write one -// myself. -// -// This job is going to run N times per 24 hours. Monzo offers webhooks for -// reacting to certain types of events. I don't expect I'll need realtime data -// for my YNAB integration. That may change, however, so it's worth noting. - -package main - -import ( - "fmt" -) - -var ( - ynabAccountID = os.Getenv("ynab_account_id") -) - -//////////////////////////////////////////////////////////////////////////////// -// Business Logic -//////////////////////////////////////////////////////////////////////////////// - -// Convert a Monzo transaction struct, `tx`, into a YNAB transaction struct. -func toYnab(tx monzoSerde.Transaction) ynabSerde.Transaction { - return ynabSerde.Transaction{ - Id: tx.Id, - Date: tx.Created, - Amount: tx.Amount, - Memo: tx.Notes, - AccountId: ynabAccountID, - } -} - -func main() { - txs := monzo.TransactionsLast24Hours() - var ynabTxs []ynabSerde.Transaction{} - for tx := range txs { - append(ynabTxs, toYnab(tx)) - } - ynab.PostTransactions(ynabTxs) - os.Exit(0) -} diff --git a/tools/monzo_ynab/monzo/client.go b/tools/monzo_ynab/monzo/client.go deleted file mode 100644 index 8c6c41e29f40..000000000000 --- a/tools/monzo_ynab/monzo/client.go +++ /dev/null @@ -1,52 +0,0 @@ -package monzoClient - -import ( - "fmt" - "log" - "monzoSerde" - "net/http" - "net/url" - "strings" - "time" - "tokens" - "utils" -) - -const ( - accountID = "pizza" -) - -type Client struct{} - -// Ensure that the token server is running and return a new instance of a Client -// struct. -func Create() *Client { - tokens.StartServer() - time.Sleep(time.Second * 1) - return &Client{} -} - -// Returns a slice of transactions from the last 24 hours. 
-func (c *Client) Transactions24Hours() []monzoSerde.Transaction { - token := tokens.AccessToken() - form := url.Values{"account_id": {accountID}} - client := http.Client{} - req, _ := http.NewRequest("POST", "https://api.monzo.com/transactions", - strings.NewReader(form.Encode())) - req.Header.Add("Authorization", fmt.Sprintf("Bearer %s", token)) - req.Header.Add("Content-Type", "application/x-www-form-urlencoded") - req.Header.Add("User-Agent", "monzo-ynab") - res, err := client.Do(req) - - utils.DebugRequest(req) - utils.DebugResponse(res) - - if err != nil { - utils.DebugRequest(req) - utils.DebugResponse(res) - log.Fatal(err) - } - defer res.Body.Close() - - return []monzoSerde.Transaction{} -} diff --git a/tools/monzo_ynab/monzo/serde.go b/tools/monzo_ynab/monzo/serde.go deleted file mode 100644 index a38585eca632..000000000000 --- a/tools/monzo_ynab/monzo/serde.go +++ /dev/null @@ -1,82 +0,0 @@ -// This package hosts the serialization and deserialization logic for all of the -// data types with which our application interacts from the Monzo API. -package main - -import ( - "encoding/json" - "fmt" - "io/ioutil" - "time" -) - -type TxMetadata struct { - FasterPayment string `json:"faster_payment"` - FpsPaymentId string `json:"fps_payment_id"` - Insertion string `json:"insertion"` - Notes string `json:"notes"` - Trn string `json:"trn"` -} - -type TxCounterparty struct { - AccountNumber string `json:"account_number"` - Name string `json:"name"` - SortCode string `json:"sort_code"` - UserId string `json:"user_id"` -} - -type Transaction struct { - Id string `json:"id"` - Created time.Time `json:"created"` - Description string `json:"description"` - Amount int `json:"amount"` - Currency string `json:"currency"` - Notes string `json:"notes"` - Metadata TxMetadata - AccountBalance int `json:"account_balance"` - International interface{} `json:"international"` - Category string `json:"category"` - IsLoad bool `json:"is_load"` - Settled time.Time `json:"settled"` - LocalAmount int `json:"local_amount"` - LocalCurrency string `json:"local_currency"` - Updated time.Time `json:"updated"` - AccountId string `json:"account_id"` - UserId string `json:"user_id"` - Counterparty TxCounterparty `json:"counterparty"` - Scheme string `json:"scheme"` - DedupeId string `json:"dedupe_id"` - Originator bool `json:"originator"` - IncludeInSpending bool `json:"include_in_spending"` - CanBeExcludedFromBreakdown bool `json:"can_be_excluded_from_breakdown"` - CanBeMadeSubscription bool `json:"can_be_made_subscription"` - CanSplitTheBill bool `json:"can_split_the_bill"` - CanAddToTab bool `json:"can_add_to_tab"` - AmountIsPending bool `json:"amount_is_pending"` - // Fees interface{} `json:"fees"` - // Merchant interface `json:"merchant"` - // Labels interface{} `json:"labels"` - // Attachments interface{} `json:"attachments"` - // Categories interface{} `json:"categories"` -} - -// Attempts to encode a Monzo transaction struct into a string. -func serializeTx(tx *Transaction) (string, error) { - x, err := json.Marshal(tx) - return string(x), err -} - -// Attempts to parse a string encoding a transaction presumably sent from a -// Monzo server. 
-func deserializeTx(x string) (*Transaction, error) { - target := &Transaction{} - err := json.Unmarshal([]byte(x), target) - return target, err -} - -func main() { - b, _ := ioutil.ReadFile("./fixture.json") - tx := string(b) - target, _ := deserializeTx(tx) - out, _ := serializeTx(target) - fmt.Println(out) -} diff --git a/tools/monzo_ynab/requests.txt b/tools/monzo_ynab/requests.txt deleted file mode 100644 index 2da17c0b326a..000000000000 --- a/tools/monzo_ynab/requests.txt +++ /dev/null @@ -1,80 +0,0 @@ -################################################################################ -# YNAB -################################################################################ -:ynab = https://api.youneedabudget.com/v1 -:ynab-access-token := (getenv "ynab_personal_access_token") -:ynab-budget-id := (getenv "ynab_budget_id") -:ynab-account-id := (getenv "ynab_account_id") - -# Test -GET :ynab/budgets -Authorization: Bearer :ynab-access-token - -# List transactions -GET :ynab/budgets/:ynab-budget-id/transactions -Authorization: Bearer :ynab-access-token - -# Post transactions -POST :ynab/budgets/:ynab-budget-id/transactions -Authorization: Bearer :ynab-access-token -Content-Type: application/json -{ - "transactions": [ - { - "account_id": ":ynab-account-id", - "date": "2019-12-30", - "amount": 10000, - "payee_name": "Richard Stallman", - "memo": "Not so free software after all...", - "cleared": "cleared", - "approved": true, - "flag_color": "red", - "import_id": "xyz-123" - } - ] -} - -################################################################################ -# Monzo -################################################################################ -:monzo = https://api.monzo.com -:monzo-access-token := (getenv "monzo_cached_access_token") -:monzo-refresh-token := (getenv "monzo_cached_refresh_token") -:monzo-client-id := (getenv "monzo_client_id") -:monzo-client-secret := (getenv "monzo_client_secret") -:monzo-account-id := (getenv "monzo_account_id") - -# List transactions -GET :monzo/transactions -Authorization: Bearer :monzo-access-token -account_id==:monzo-account-id - -# Refresh access token -# According from the docs, the access token expires in 6 hours. 
-POST :monzo/oauth2/token -Content-Type: application/x-www-form-urlencoded -Authorization: Bearer :monzo-access-token -grant_type=refresh_token&client_id=:monzo-client-id&client_secret=:monzo-client-secret&refresh_token=:monzo-refresh-token - -################################################################################ -# Tokens server -################################################################################ -:tokens = http://localhost:4242 - -# Get tokens -GET :tokens/tokens - -# Get application state for debugging purposes -GET :tokens/state - -# Force refresh tokens -POST :tokens/refresh-tokens - -# Set tokens -POST :tokens/set-tokens -Content-Type: application/json -{ - "access_token": "access-token", - "refresh_token": "refresh-token", - "expires_in": 120 -} diff --git a/tools/monzo_ynab/shell.nix b/tools/monzo_ynab/shell.nix deleted file mode 100644 index 910d7c1829e2..000000000000 --- a/tools/monzo_ynab/shell.nix +++ /dev/null @@ -1,10 +0,0 @@ -let - briefcase = import <briefcase> {}; - pkgs = briefcase.third_party.pkgs; -in pkgs.mkShell { - buildInputs = [ - pkgs.go - pkgs.goimports - pkgs.godef - ]; -} diff --git a/tools/monzo_ynab/tokens.go b/tools/monzo_ynab/tokens.go deleted file mode 100644 index 4be967ccb803..000000000000 --- a/tools/monzo_ynab/tokens.go +++ /dev/null @@ -1,283 +0,0 @@ -// Creating a Tokens server to manage my access and refresh tokens. Keeping this -// as a separate server allows me to develop and use the access tokens without -// going through client authorization. -package main - -//////////////////////////////////////////////////////////////////////////////// -// Dependencies -//////////////////////////////////////////////////////////////////////////////// - -import ( - "auth" - "encoding/json" - "fmt" - "io" - "kv" - "log" - "net/http" - "net/url" - "os" - "os/signal" - "syscall" - "time" - "utils" -) - -//////////////////////////////////////////////////////////////////////////////// -// Types -//////////////////////////////////////////////////////////////////////////////// - -// This is the response from Monzo's API after we request an access token -// refresh. -type refreshTokenResponse struct { - AccessToken string `json:"access_token"` - RefreshToken string `json:"refresh_token"` - ClientId string `json:"client_id"` - ExpiresIn int `json:"expires_in"` -} - -// This is the shape of the request from clients wishing to set state of the -// server. -type setTokensRequest struct { - AccessToken string `json:"access_token"` - RefreshToken string `json:"refresh_token"` - ExpiresIn int `json:"expires_in"` -} - -// This is our application state. 
-type state struct { - accessToken string `json:"access_token"` - refreshToken string `json:"refresh_token"` -} - -type readMsg struct { - sender chan state -} - -type writeMsg struct { - state state - sender chan bool -} - -type channels struct { - reads chan readMsg - writes chan writeMsg -} - -//////////////////////////////////////////////////////////////////////////////// -// Top-level Definitions -//////////////////////////////////////////////////////////////////////////////// - -var chans = &channels{ - reads: make(chan readMsg), - writes: make(chan writeMsg), -} - -var ( - monzoClientId = os.Getenv("monzo_client_id") - monzoClientSecret = os.Getenv("monzo_client_secret") - storePath = os.Getenv("store_path") -) - -//////////////////////////////////////////////////////////////////////////////// -// Utils -//////////////////////////////////////////////////////////////////////////////// - -// Print the access and refresh tokens for debugging. -func logTokens(access string, refresh string) { - log.Printf("Access: %s\n", access) - log.Printf("Refresh: %s\n", refresh) -} - -func (state *state) String() string { - return fmt.Sprintf("state{\n\taccessToken: \"%s\",\n\trefreshToken: \"%s\"\n}\n", state.accessToken, state.refreshToken) -} - -// Schedule a token refresh for `expiresIn` seconds using the provided -// `refreshToken`. This will update the application state with the access token -// and schedule an additional token refresh for the newly acquired tokens. -func scheduleTokenRefresh(expiresIn int, refreshToken string) { - duration := time.Second * time.Duration(expiresIn) - timestamp := time.Now().Local().Add(duration) - // TODO(wpcarro): Consider adding a more human readable version that will - // log the number of hours, minutes, etc. until the next refresh. - log.Printf("Scheduling token refresh for %v\n", timestamp) - time.Sleep(duration) - log.Println("Refreshing tokens now...") - accessToken, refreshToken := refreshTokens(refreshToken) - log.Println("Successfully refreshed tokens.") - logTokens(accessToken, refreshToken) - setState(accessToken, refreshToken) -} - -// Exchange existing credentials for a new access token and `refreshToken`. Also -// schedule the next refresh. This function returns the newly acquired access -// token and refresh token. -func refreshTokens(refreshToken string) (string, string) { - // TODO(wpcarro): Support retries with exponential backoff. - res, err := http.PostForm("https://api.monzo.com/oauth2/token", url.Values{ - "grant_type": {"refresh_token"}, - "client_id": {monzoClientId}, - "client_secret": {monzoClientSecret}, - "refresh_token": {refreshToken}, - }) - if res.StatusCode != http.StatusOK { - // TODO(wpcarro): Considering panicking here. - utils.DebugResponse(res) - } - if err != nil { - utils.DebugResponse(res) - log.Fatal("The request to Monzo to refresh our access token failed.", err) - } - defer res.Body.Close() - payload := &refreshTokenResponse{} - err = json.NewDecoder(res.Body).Decode(payload) - if err != nil { - log.Fatal("Could not decode the JSON response from Monzo.", err) - } - - go scheduleTokenRefresh(payload.ExpiresIn, payload.RefreshToken) - - // Interestingly, JSON decoding into the refreshTokenResponse can success - // even if the decoder doesn't populate any of the fields in the - // refreshTokenResponse struct. From what I read, it isn't possible to make - // these fields as required using an annotation, so this guard must suffice - // for now. 
- if payload.AccessToken == "" || payload.RefreshToken == "" { - log.Fatal("JSON parsed correctly but failed to populate token fields.") - } - - return payload.AccessToken, payload.RefreshToken -} - -func persistTokens(access string, refresh string) { - log.Println("Persisting tokens...") - kv.Set(storePath, "monzoAccessToken", access) - kv.Set(storePath, "monzoRefreshToken", refresh) - log.Println("Successfully persisted tokens.") -} - -// Listen for SIGINT and SIGTERM signals. When received, persist the access and -// refresh tokens and shutdown the server. -func handleInterrupts() { - // Gracefully handle interruptions. - sigs := make(chan os.Signal, 1) - done := make(chan bool) - - signal.Notify(sigs, syscall.SIGINT, syscall.SIGTERM) - - go func() { - sig := <-sigs - log.Printf("Received signal to shutdown. %v\n", sig) - state := getState() - persistTokens(state.accessToken, state.refreshToken) - done <- true - }() - - <-done - log.Println("Exiting...") - os.Exit(0) -} - -// Set `accessToken` and `refreshToken` on application state. -func setState(accessToken string, refreshToken string) { - msg := writeMsg{state{accessToken, refreshToken}, make(chan bool)} - chans.writes <- msg - <-msg.sender -} - -// Return our application state. -func getState() state { - msg := readMsg{make(chan state)} - chans.reads <- msg - return <-msg.sender -} - -//////////////////////////////////////////////////////////////////////////////// -// Main -//////////////////////////////////////////////////////////////////////////////// - -func main() { - // Manage application state. - go func() { - state := &state{} - for { - select { - case msg := <-chans.reads: - log.Println("Reading from state...") - log.Println(state) - msg.sender <- *state - case msg := <-chans.writes: - log.Println("Writing to state.") - log.Printf("Old: %s\n", state) - *state = msg.state - log.Printf("New: %s\n", state) - // As an attempt to maintain consistency between application - // state and persisted state, everytime we write to the - // application state, we will write to the store. - persistTokens(state.accessToken, state.refreshToken) - msg.sender <- true - } - } - }() - - // Retrieve cached tokens from store. - accessToken := fmt.Sprintf("%v", kv.Get(storePath, "monzoAccessToken")) - refreshToken := fmt.Sprintf("%v", kv.Get(storePath, "monzoRefreshToken")) - - log.Println("Attempting to retrieve cached credentials...") - logTokens(accessToken, refreshToken) - - if accessToken == "" || refreshToken == "" { - log.Println("Cached credentials are absent. Authorizing client...") - authCode := auth.GetAuthCode(monzoClientId) - tokens := auth.GetTokensFromAuthCode(authCode, monzoClientId, monzoClientSecret) - setState(tokens.AccessToken, tokens.RefreshToken) - go scheduleTokenRefresh(tokens.ExpiresIn, tokens.RefreshToken) - } else { - setState(accessToken, refreshToken) - // If we have tokens, they may be expiring soon. We don't know because - // we aren't storing the expiration timestamp in the state or in the - // store. Until we have that information, and to be safe, let's refresh - // the tokens. - go scheduleTokenRefresh(0, refreshToken) - } - - // Gracefully handle shutdowns. - go handleInterrupts() - - // Listen to inbound requests. 
- fmt.Println("Listening on http://localhost:4242 ...") - log.Fatal(http.ListenAndServe(":4242", - http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) { - if req.URL.Path == "/refresh-tokens" && req.Method == "POST" { - state := getState() - go scheduleTokenRefresh(0, state.refreshToken) - fmt.Fprintf(w, "Done.") - } else if req.URL.Path == "/set-tokens" && req.Method == "POST" { - // Parse - payload := &setTokensRequest{} - err := json.NewDecoder(req.Body).Decode(payload) - if err != nil { - log.Fatal("Could not decode the user's JSON request.", err) - } - - // Update application state - setState(payload.AccessToken, payload.RefreshToken) - - // Refresh tokens - go scheduleTokenRefresh(payload.ExpiresIn, payload.RefreshToken) - - // Ack - fmt.Fprintf(w, "Done.") - } else if req.URL.Path == "/state" && req.Method == "GET" { - // TODO(wpcarro): Ensure that this returns serialized state. - w.Header().Set("Content-type", "application/json") - state := getState() - payload, _ := json.Marshal(state) - io.WriteString(w, string(payload)) - } else { - log.Printf("Unhandled request: %v\n", *req) - } - }))) -} diff --git a/tools/monzo_ynab/tokens.nix b/tools/monzo_ynab/tokens.nix deleted file mode 100644 index 97de09d741e9..000000000000 --- a/tools/monzo_ynab/tokens.nix +++ /dev/null @@ -1,23 +0,0 @@ -{ depot, briefcase, ... }: - -let - auth = depot.buildGo.package { - name = "auth"; - srcs = [ - ./auth.go - ]; - deps = with briefcase.gopkgs; [ - utils - ]; - }; -in depot.buildGo.program { - name = "token-server"; - srcs = [ - ./tokens.go - ]; - deps = with briefcase.gopkgs; [ - kv - utils - auth - ]; -} diff --git a/tools/monzo_ynab/ynab/client.go b/tools/monzo_ynab/ynab/client.go deleted file mode 100644 index 0492b9071adc..000000000000 --- a/tools/monzo_ynab/ynab/client.go +++ /dev/null @@ -1,24 +0,0 @@ -package client - -import ( - "serde" -) - -// See requests.txt for more details. -func PostTransactions(accountID string, txs []serde.Transaction{}) error { - return map[string]string{ - "transactions": [ - { - "account_id": accountID, - "date": "2019-12-30", - "amount": 10000, - "payee_name": "Richard Stallman", - "memo": "Not so free software after all...", - "cleared": "cleared", - "approved": true, - "flag_color": "red", - "import_id": "xyz-123" - } - ] - } -} diff --git a/tools/monzo_ynab/ynab/serde.go b/tools/monzo_ynab/ynab/serde.go deleted file mode 100644 index 53dd33e83637..000000000000 --- a/tools/monzo_ynab/ynab/serde.go +++ /dev/null @@ -1,52 +0,0 @@ -// This package hosts the serialization and deserialization logic for all of the -// data types with which our application interacts from the YNAB API. 
-package main - -import ( - "encoding/json" - "fmt" - "time" -) - -type Transaction struct { - Id string `json:"id"` - Date time.Time `json:"date"` - Amount int `json:"amount"` - Memo string `json:"memo"` - Cleared string `json:"cleared"` - Approved bool `json:"approved"` - FlagColor string `json:"flag_color"` - AccountId string `json:"account_id"` - AccountName string `json:"account_name"` - PayeeId string `json:"payeed_id"` - PayeeName string `json:"payee_name"` - CategoryId string `json:"category_id"` - CategoryName string `json:"category_name"` - Deleted bool `json:"deleted"` - // TransferAccountId interface{} `json:"transfer_account_id"` - // TransferTransactionId interface{} `json:"transfer_transaction_id"` - // MatchedTransactionId interface{} `json:"matched_transaction_id"` - // ImportId interface{} `json:"import_id"` - // Subtransactions interface{} `json:"subtransactions"` -} - -// Attempts to encode a YNAB transaction into a string. -func serializeTx(tx *Transaction) (string, error) { - x, err := json.Marshal(tx) - return string(x), err -} - -// Attempts to parse a string encoding a transaction presumably sent from a -// YNAB server. -func deserializeTx(x string) (*Transaction, error) { - target := &Transaction{} - err := json.Unmarshal([]byte(x), target) - return target, err -} - -func main() { - target, _ := deserializeTx(tx) - out, _ := serializeTx(target) - fmt.Println(out) - fmt.Println(ynabOut) -} diff --git a/tools/nsfv-setup/default.nix b/tools/nsfv-setup/default.nix new file mode 100644 index 000000000000..98dcc61b7bc1 --- /dev/null +++ b/tools/nsfv-setup/default.nix @@ -0,0 +1,28 @@ +# Configures a running Pulseaudio instance with an LADSP filter that +# creates a noise-cancelling sink. +# +# This can be used to, for example, cancel noise from an incoming +# video conferencing audio stream. +# +# There are some caveats, for example this will not distinguish +# between noise from different participants and I have no idea what +# happens if the default sink goes away. +# +# If this script is run while an NSFV sink exists, the existing sink +# will first be removed. +{ depot, pkgs, ... }: + +let + inherit (pkgs) ripgrep pulseaudio; + inherit (depot.third_party) nsfv; +in pkgs.writeShellScriptBin "nsfv-setup" '' + export PATH="${ripgrep}/bin:${pulseaudio}/bin:$PATH" + + if pacmd list-sinks | rg librnnoise_ladspa.so >/dev/null; then + pactl unload-module module-ladspa-sink + fi + + SINK=$(${pulseaudio}/bin/pacmd info | ${ripgrep}/bin/rg -r '$1' '^Default sink name: (.*)$') + echo "Setting up NSFV filtering to sink ''${SINK}" + ${pulseaudio}/bin/pacmd load-module module-ladspa-sink sink_name=NSFV sink_master=''${SINK} label=noise_suppressor_mono plugin=${nsfv}/lib/ladspa/librnnoise_ladspa.so control=42 rate=48000 +'' diff --git a/tools/perf-flamegraph.nix b/tools/perf-flamegraph.nix new file mode 100644 index 000000000000..b472b746ff14 --- /dev/null +++ b/tools/perf-flamegraph.nix @@ -0,0 +1,12 @@ +# Script that collects perf timing for the execution of a command and writes a +# flamegraph to stdout +{ pkgs, ... 
}: + +pkgs.writeShellScriptBin "perf-flamegraph" '' + set -euo pipefail + + ${pkgs.linuxPackages.perf}/bin/perf record -g --call-graph dwarf -F max "$@" + ${pkgs.linuxPackages.perf}/bin/perf script \ + | ${pkgs.flamegraph}/bin/stackcollapse-perf.pl \ + | ${pkgs.flamegraph}/bin/flamegraph.pl +'' diff --git a/tools/rfcToKindle/LICENSE b/tools/rfcToKindle/LICENSE deleted file mode 100644 index 7a4a3ea2424c..000000000000 --- a/tools/rfcToKindle/LICENSE +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." 
- - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. 
- - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. 
We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. \ No newline at end of file diff --git a/tools/rfcToKindle/README.md b/tools/rfcToKindle/README.md deleted file mode 100644 index e7b4fa841ef6..000000000000 --- a/tools/rfcToKindle/README.md +++ /dev/null @@ -1,30 +0,0 @@ -# rfcToKindle - -Wirelessly transfer RFC documents to your Kindle to device for an alternative -medium for reading. - -## Installation - -`rfcToKindle` makes use of [`buildGo.nix`][2] to package itself. If you're -using [Nix][1], you can install `rfcToKindle` using `nix-env`: - -```shell -> nix-env -f https://github.com/wpcarro/rfcToKindle -i -``` - -## Usage - -```shell -> rfcToKindle -document rfc6479 -recipient username@kindle.com -``` - -## Dependencies - -This uses `sendgmr` to send the file to the Kindle. Make sure: -1. That `sendgmr` is installed and available on $PATH. -2. That it is configured to work with your preferred email address. -3. That the email address `sendgmr` is configured to use is whitelisted in - your Kindle "Personal Document Settings". - -[1]: https://nixos.org/nix/ -[2]: https://git.tazj.in/tree/nix/buildGo diff --git a/tools/rfcToKindle/default.nix b/tools/rfcToKindle/default.nix deleted file mode 100644 index 8fb93c3bb5b8..000000000000 --- a/tools/rfcToKindle/default.nix +++ /dev/null @@ -1,11 +0,0 @@ -{ depot, ... }: - -# TODO: This doesn't depend on `sendgmr` at the moment, but it should. As such, -# it's an imcomplete packaging. -depot.buildGo.program { - name = "rfcToKindle"; - srcs = [ - ./main.go - ]; - deps = []; -} diff --git a/tools/rfcToKindle/main.go b/tools/rfcToKindle/main.go deleted file mode 100644 index 0f4f2dd9ec4f..000000000000 --- a/tools/rfcToKindle/main.go +++ /dev/null @@ -1,89 +0,0 @@ -// Author: wpcarro@gmail.com -// -// Wirelessly transfer RFC documents to your Kindle to device for an alternative -// medium for reading. -// -// Usage: -// ```shell -// > go run rfcToKindle.go -document rfc6479 -recipient username@kindle.com -// ``` -// -// This uses `sendgmr` to send the file to the Kindle. Make sure: -// 1. That `sendgmr` is installed and available on $PATH. -// 2. That it is configured to work with your preferred email address. -// 3. That the email address `sendgmr` is configured to use is whitelisted in -// your Kindle "Personal Document Settings". - -package main - -import ( - "flag" - "fmt" - "io" - "io/ioutil" - "log" - "net/http" - "os" - "os/exec" - "strings" -) - -func main() { - document := flag.String("document", "", "(Required) The name of the document to fetch. 
For example \"RFC6479\".") - recipient := flag.String("recipient", "", "(Required) The email address of the Kindle device.") - subject := flag.String("subject", "", "(Optional) The email address of the Kindle device.") - flag.Parse() - - if *document == "" { - // TODO: Is log.Fatal the best function to use here? - log.Fatal("-document cannot be empty. See -help for more information.") - } - - if *recipient == "" { - log.Fatal("-recipient cannot be empty. See -help for more information.") - } - - *document = strings.ToLower(*document) - - url := fmt.Sprintf("https://www.ietf.org/rfc/%s.txt", *document) - resp, err := http.Get(url) - fmt.Printf("Downloading %s ... ", url) - - if err != nil { - log.Fatal(err) - } - defer resp.Body.Close() - - f, err := ioutil.TempFile("", fmt.Sprintf("%s-*.txt", *document)) - if err != nil { - log.Fatal(err) - } - // TODO: Verify if this is cleaning up or not. - defer os.Remove(f.Name()) - - _, err = io.Copy(f, resp.Body) - if err != nil { - log.Fatal(err) - } - fmt.Println("done.") - - if *subject == "" { - *subject = fmt.Sprintf("%s - Sent from rfcToKindle.go", *document) - } - - // Although I couldn't find it documented anywhere, the email sent to the - // Kindle must have a body, even if the body isn't used for anything. - fmt.Printf("Emailing %s to %s ... ", f.Name(), *recipient) - cmd := exec.Command("sendgmr", - fmt.Sprintf("--to=%s", *recipient), - fmt.Sprintf("--body_file=%s", f.Name()), - fmt.Sprintf("--subject=%s", *subject), - fmt.Sprintf("--attachment_files=%s", f.Name())) - err = cmd.Run() - if err != nil { - log.Fatal(err) - } - fmt.Println("done.") - - os.Exit(0) -} diff --git a/tools/run/.envrc b/tools/run/.envrc deleted file mode 100644 index a4a62da526d3..000000000000 --- a/tools/run/.envrc +++ /dev/null @@ -1,2 +0,0 @@ -source_up -use_nix diff --git a/tools/run/README.md b/tools/run/README.md deleted file mode 100644 index d3cccecf910c..000000000000 --- a/tools/run/README.md +++ /dev/null @@ -1,30 +0,0 @@ -# run - -Simplify the commands you call to run scripts on the command line. - -```shell -> run path/to/file.py -> run path/to/file.ts -``` - -## How? - -Define a run.json configuration mapping commands to filename extensions like -so: -```json -{ - ".ts": "npx ts-node $file", - ".py": "python3 $file" -} -``` - -Then call `run path/to/some/file.ts` on the command line, and `npx ts-node -file.ts` will run. - -## Installation - -Install `run` using Nix. - -```shell -> nix-env -iA briefcase.run -``` diff --git a/tools/run/default.nix b/tools/run/default.nix deleted file mode 100644 index 7d772c3f9079..000000000000 --- a/tools/run/default.nix +++ /dev/null @@ -1,11 +0,0 @@ -{ pkgs, depot, briefcase, ... 
}: - -depot.buildGo.program { - name = "run"; - srcs = [ - ./main.go - ]; - deps = with briefcase.gopkgs; [ - utils - ]; -} diff --git a/tools/run/main.go b/tools/run/main.go deleted file mode 100644 index 04906ece91f7..000000000000 --- a/tools/run/main.go +++ /dev/null @@ -1,49 +0,0 @@ -package main - -import ( - "encoding/json" - "fmt" - "io/ioutil" - "log" - "os" - "os/exec" - "path/filepath" - "strings" - "utils" -) - -func main() { - if len(os.Args) != 2 { - log.Fatal("You can only call run with a single file at a time.") - } - - rulesPath := utils.Resolve("run.json", []string{"/home/wpcarro/.config/run/run.json"}) - b, err := ioutil.ReadFile(rulesPath) - if err != nil { - log.Fatal("Could not locate a run.json file: ", err) - } - rules := map[string]string{} - err = json.Unmarshal(b, &rules) - if err != nil { - log.Fatal("Could not decode run.json as JSON: ", err) - } - - fileName := os.Args[1] - ext := filepath.Ext(fileName) - cmd, ok := rules[ext] - - if !ok { - log.Fatalf("No rules for extension, %s, have been defined.", ext) - } - - // TODO(wpcarro): Support more sophisticated parsing than just string - // splitting. To handle 'cases like this'. - tokens := strings.Split(strings.Replace(cmd, "$file", fileName, 1), " ") - c := exec.Command(tokens[0], tokens[1:]...) - err = c.Start() - // TODO(wpcarro): Forward STDERR and STDOUT. - if err != nil { - log.Fatal(err) - } - fmt.Println(c.Wait()) -} diff --git a/tools/run/shell.nix b/tools/run/shell.nix deleted file mode 100644 index e14bffae487c..000000000000 --- a/tools/run/shell.nix +++ /dev/null @@ -1,10 +0,0 @@ -let - briefcase = import <briefcase> {}; - pkgs = briefcase.third_party.pkgs; -in pkgs.mkShell { - buildInputs = with pkgs; [ - go - goimports - godef - ]; -} diff --git a/tools/rust-crates-advisory/OWNERS b/tools/rust-crates-advisory/OWNERS new file mode 100644 index 000000000000..a742d0d22bf6 --- /dev/null +++ b/tools/rust-crates-advisory/OWNERS @@ -0,0 +1,3 @@ +inherited: true +owners: + - Profpatsch diff --git a/tools/rust-crates-advisory/check-security-advisory.rs b/tools/rust-crates-advisory/check-security-advisory.rs new file mode 100644 index 000000000000..3fd9bc2dd947 --- /dev/null +++ b/tools/rust-crates-advisory/check-security-advisory.rs @@ -0,0 +1,67 @@ +extern crate semver; +extern crate toml; + +use std::io::Write; + +/// reads a security advisory of the form +/// https://github.com/RustSec/advisory-db/blob/a24932e220dfa9be8b0b501210fef8a0bc7ef43e/EXAMPLE_ADVISORY.md +/// and a crate version number, +/// and returns 0 if the crate version is patched +/// and returns 1 if the crate version is *not* patched +/// +/// If PRINT_ADVISORY is set, the advisory is printed if it matches. 
+ +fn main() { + let mut args = std::env::args_os(); + let file = args.nth(1).expect("security advisory md file is $1"); + let crate_version = + args.nth(0).expect("crate version is $2") + .into_string().expect("crate version string not utf8") + ; + let crate_version = semver::Version::parse(&crate_version).expect(&format!("this is not a semver version: {}", &crate_version)); + let filename = file.to_string_lossy(); + + let content = std::fs::read(&file).expect(&format!("could not read {}", filename)); + let content = + std::str::from_utf8(&content).expect(&format!("file {} was not encoded as utf-8", filename)); + let content = content.trim_start(); + + let toml_start = content + .strip_prefix("```toml").expect(&format!("file did not start with ```toml: {}", filename)); + let toml_end_index = toml_start.find("```").expect(&format!("the toml section did not end, no `` found: {}", filename)); + let toml = &toml_start[..toml_end_index]; + let toml : toml::Value = toml::de::from_slice(toml.as_bytes()).expect(&format!("could not parse toml: {}", filename)); + + let versions = toml + .as_table().expect(&format!("the toml is not a table: {}", filename)) + .get("versions").expect(&format!("the toml does not contain the versions field: {}", filename)) + .as_table().expect(&format!("the toml versions field must be a table: {}", filename)); + + let unaffected = match versions.get("unaffected") { + Some(u) => u + .as_array().expect(&format!("the toml versions.unaffected field must be a list of semvers: {}", filename)) + .iter() + .map(|v| semver::VersionReq::parse(v.as_str().expect(&format!("the version field {} is not a string", v))).expect(&format!("the version field {} is not a valid semver VersionReq", v))) + .collect(), + None => vec![] + }; + + let mut patched : Vec<semver::VersionReq> = versions.get("patched").expect(&format!("the toml versions.patched field must exist: {}", filename)) + .as_array().expect(&format!("the toml versions.patched field must be a list of semvers: {}", filename)) + .iter() + .map(|v| semver::VersionReq::parse(v.as_str().expect(&format!("the version field {} is not a string", v))).expect(&format!("the version field {} is not a valid semver VersionReq", v))) + .collect(); + + patched.extend_from_slice(&unaffected[..]); + let is_patched_or_unaffected = patched.iter().any(|req| req.matches(&crate_version)); + + if is_patched_or_unaffected { + std::process::exit(0); + } else { + if std::env::var_os("PRINT_ADVISORY").is_some() { + write!(std::io::stderr(), "Advisory {} matched!\n{}\n", filename, content).unwrap(); + } + std::process::exit(1); + } + +} diff --git a/tools/rust-crates-advisory/default.nix b/tools/rust-crates-advisory/default.nix new file mode 100644 index 000000000000..c0cd4dc03e05 --- /dev/null +++ b/tools/rust-crates-advisory/default.nix @@ -0,0 +1,90 @@ +{ depot, pkgs, lib, ... }: + +let + + bins = + depot.nix.getBins pkgs.s6-portable-utils [ "s6-ln" "s6-cat" "s6-echo" "s6-mkdir" "s6-test" "s6-touch" ] + // depot.nix.getBins pkgs.lr [ "lr" ] + ; + + crate-advisories = "${depot.third_party.rustsec-advisory-db}/crates"; + + our-crates = lib.filter (v: v ? 
outPath) + (builtins.attrValues depot.third_party.rust-crates); + + check-security-advisory = depot.nix.writers.rustSimple { + name = "parse-security-advisory"; + dependencies = [ + depot.third_party.rust-crates.toml + depot.third_party.rust-crates.semver + ]; + } (builtins.readFile ./check-security-advisory.rs); + + # $1 is the directory with advisories for crate $2 with version $3 + check-crate-advisory = depot.nix.writeExecline "check-crate-advisory" { readNArgs = 3; } [ + "pipeline" [ bins.lr "-0" "-t" "depth == 1" "$1" ] + "forstdin" "-0" "-Eo" "0" "advisory" + "if" [ depot.tools.eprintf "advisory %s\n" "$advisory" ] + check-security-advisory "$advisory" "$3" + ]; + + # Run through everything in the `crate-advisories` repository + # and check whether we can parse all the advisories without crashing. + test-parsing-all-security-advisories = depot.nix.runExecline "check-all-our-crates" {} [ + "pipeline" [ bins.lr "-0" "-t" "depth == 1" crate-advisories ] + "if" [ + # this will succeed as long as check-crate-advisory doesn’t `panic!()` (status 101) + "forstdin" "-0" "-E" "-x" "101" "crate_advisories" + check-crate-advisory "$crate_advisories" "foo" "0.0.0" + ] + "importas" "out" "out" + bins.s6-touch "$out" + ]; + + + check-all-our-crates = depot.nix.runExecline "check-all-our-crates" { + stdin = lib.concatStrings + (map + (crate: + depot.nix.netstring.fromString + ( depot.nix.netstring.fromString crate.crateName + + depot.nix.netstring.fromString crate.version )) + our-crates); + } [ + "if" [ + "forstdin" "-o" "0" "-Ed" "" "crateNetstring" + "multidefine" "-d" "" "$crateNetstring" [ "crate" "crate_version" ] + "if" [ depot.tools.eprintf "checking %s, version %s\n" "$crate" "$crate_version" ] + + "ifthenelse" [ bins.s6-test "-d" "${crate-advisories}/\${crate}" ] + [ # also print the full advisory text if it matches + "export" "PRINT_ADVISORY" "1" + check-crate-advisory "${crate-advisories}/\${crate}" "$crate" "$crate_version" + ] + [ depot.tools.eprintf "No advisories found for crate %s\n" "$crate" ] + "importas" "-ui" "ret" "?" + # put a marker in ./failed to read at the end + "ifelse" [ bins.s6-test "$ret" "-eq" "1" ] + [ bins.s6-touch "./failed" ] + "if" [ depot.tools.eprintf "\n" ] + "exit" "$ret" + ] + "ifelse" [ bins.s6-test "-f" "./failed" ] + [ "if" [ depot.tools.eprintf "Error: Found active advisories!" ] + "exit" "1" + ] + "importas" "out" "out" + bins.s6-touch "$out" + ]; + +in depot.nix.readTree.drvTargets { + + check-all-our-crates = + depot.nix.drvSeqL + [ test-parsing-all-security-advisories ] + check-all-our-crates; + + inherit + check-crate-advisory + ; +} diff --git a/tools/simple_vim/config.vim b/tools/simple_vim/config.vim deleted file mode 100644 index ea40964ee803..000000000000 --- a/tools/simple_vim/config.vim +++ /dev/null @@ -1,98 +0,0 @@ -" My barebones vimrc without any Vundle dependencies. -" -" I'm attempting to optimize the following: -" - Minimize dependencies -" - Maximize ergonomics -" - Maximize Tmux compatibility -" - Minimize shadowing of existing Vim KBDs -" -" Warning: This is currently unstable as it is a work-in-progress. -" -" Author: William Carroll <wpcarro@gmail.com> - -" Use <Space> as the leader key. -let mapleader = " " -nnoremap <leader>ev :tabnew<CR>:edit ~/.vimrc<CR> -nnoremap <leader>sv :source ~/.vimrc<CR> -nnoremap <leader>w :w<CR> -nnoremap <leader>h :help - -" increment,decrement numbers -nnoremap + <C-a> -" TODO: Restore with better KBD -" nnoremap - <C-x> - -" Visit the CWD -nnoremap - :e .<CR> - -" Turn line numbers on. 
-set number - -" Easily create vertical, horizontal window splits. -nnoremap sh :vsplit<CR> -nnoremap sj :split<CR>:wincmd j<CR> -nnoremap sk :split<CR> -nnoremap sl :vsplit<CR>:wincmd l<CR> - -" Move across window splits. -" TODO: Change to <M-{h,j,k,l}>. -nnoremap <C-h> :wincmd h<CR> -nnoremap <C-j> :wincmd j<CR> -nnoremap <C-k> :wincmd k<CR> -nnoremap <C-l> :wincmd l<CR> - -" TODO: Support these. -" nnoremap <M-q> :q<CR> -" nnoremap <M-h> :wincmd h<CR> -" nnoremap <M-j> :wincmd j<CR> -" nnoremap <M-k> :wincmd k<CR> -" nnoremap <M-l> :wincmd l<CR> - -" Use <Enter> instead of G to support: -" 20<Enter> - to jump to line 20 -" d20<Enter> - to delete from the current line until line 20 -" <C-v>20<Enter> - to select from the current line until line 20 -nnoremap <Enter> G -onoremap <Enter> G -vnoremap <Enter> G - -" Easily change modes on keyboards that don't have CapsLock mapped to <Esc> -inoremap jk <ESC> - -" CRUD tabs. -nnoremap <TAB> :tabnext<CR> -nnoremap <S-TAB> :tabprevious<CR> -nnoremap <C-t> :tabnew<CR>:edit .<CR> -nnoremap <C-w> :tabclose<CR> -" TODO: Re-enable these once <M-{h,j,k,l}> are supported. -" nnoremap <C-l> :+tabmove<CR> -" nnoremap <C-h> :-tabmove<CR> - -" Use H,L to goto beggining,end of a line. -" Swaps the keys to ensure original functionality of H,L are preserved. -nnoremap H ^ -nnoremap L $ -nnoremap ^ H -nnoremap $ L - -" Use H,L in visual mode too -vnoremap H ^ -vnoremap L $ -vnoremap ^ H -vnoremap $ L - -" Emacs hybrid mode -" TODO: model this after tpope's rsi.vim (Readline-style insertion) -cnoremap <C-g> <C-c> -cnoremap <C-a> <C-b> -inoremap <C-a> <C-o>^ -inoremap <C-e> <C-o>$ -inoremap <C-b> <C-o>h -inoremap <C-f> <C-o>l - -" Indenting -" The following three settings are based on option 2 of `:help tabstop` -set tabstop=4 -set shiftwidth=4 -set expandtab -set autoindent diff --git a/tools/simple_vim/default.nix b/tools/simple_vim/default.nix deleted file mode 100644 index f8f965f2c024..000000000000 --- a/tools/simple_vim/default.nix +++ /dev/null @@ -1,15 +0,0 @@ -{ pkgs, ... }: - -let - configVim = builtins.path { - path = ./config.vim; - name = "config.vim"; - }; - - script = pkgs.writeShellScriptBin "simple_vim" '' - ${pkgs.vim}/bin/vim -u ${configVim} - ''; -in pkgs.stdenv.mkDerivation { - name = "simple_vim"; - buildInputs = [ script ]; -} diff --git a/tools/symlinkManager/README.md b/tools/symlinkManager/README.md deleted file mode 100644 index b0fc58c8e989..000000000000 --- a/tools/symlinkManager/README.md +++ /dev/null @@ -1,14 +0,0 @@ -# Dotfile Symlink Manager - -Find and delete all symlinks to the dotfiles defined in `$BRIEFCASE`. - -Oftentimes I corrupt the state of my configuration files. The intention with -this script is to help me clean things up when this happens. An example workflow -might look like: - -```shell -> symlink-mgr --audit -> symlink-mgr --seriously -> briefcase # changes directory to $BRIEFCASE -> make install -``` diff --git a/tools/symlinkManager/default.nix b/tools/symlinkManager/default.nix deleted file mode 100644 index 16bb26bb3c2e..000000000000 --- a/tools/symlinkManager/default.nix +++ /dev/null @@ -1,11 +0,0 @@ -{ depot, briefcase, ... 
}: - -depot.buildGo.program { - name = "symlink-mgr"; - srcs = [ - ./main.go - ]; - deps = with briefcase.gopkgs; [ - utils - ]; -} diff --git a/tools/symlinkManager/main.go b/tools/symlinkManager/main.go deleted file mode 100644 index e682867fb850..000000000000 --- a/tools/symlinkManager/main.go +++ /dev/null @@ -1,82 +0,0 @@ -package main - -import ( - "errors" - "flag" - "fmt" - "log" - "os" - "path/filepath" - "strings" - "utils" -) - -var hostnames = map[string]string{ - os.Getenv("DESKTOP"): "desktop", - os.Getenv("LAPTOP"): "work_laptop", -} - -func main() { - audit := flag.Bool("audit", false, "Output all symlinks that would be deleted. This is the default behavior. This option is mutually exclusive with the --seriously option.") - seriously := flag.Bool("seriously", false, "Actually delete the symlinks. This option is mutually exclusive with the --audit option.") - repoName := flag.String("repo-name", "briefcase", "The name of the repository.") - deviceOnly := flag.Bool("device-only", false, "Only output the device-specific dotfiles.") - flag.Parse() - - if !*audit && !*seriously { - log.Fatal(errors.New("Either -audit or -seriously needs to be set.")) - } - if *audit == *seriously { - log.Fatal(errors.New("Arguments -audit and -seriously are mutually exclusive")) - } - - home, err := os.UserHomeDir() - utils.FailOn(err) - count := 0 - - err = filepath.Walk(home, func(path string, info os.FileInfo, err error) error { - if utils.IsSymlink(info.Mode()) { - dest, err := os.Readlink(path) - utils.FailOn(err) - - var predicate func(string) bool - - if *deviceOnly { - predicate = func(dest string) bool { - var hostname string - hostname, err = os.Hostname() - utils.FailOn(err) - seeking, ok := hostnames[hostname] - if !ok { - log.Fatal(fmt.Sprintf("Hostname \"%s\" not supported in the hostnames map.", hostname)) - } - return strings.Contains(dest, *repoName) && strings.Contains(dest, seeking) - } - } else { - predicate = func(dest string) bool { - return strings.Contains(dest, *repoName) - } - } - - if predicate(dest) { - if *audit { - fmt.Printf("%s -> %s\n", path, dest) - } else if *seriously { - fmt.Printf("rm %s\n", path) - err = os.Remove(path) - utils.FailOn(err) - } - count += 1 - } - } - return nil - }) - utils.FailOn(err) - if *audit { - fmt.Printf("Would have deleted %d symlinks.\n", count) - } else if *seriously { - fmt.Printf("Successfully deleted %d symlinks.\n", count) - } - - os.Exit(0) -} diff --git a/tools/tvlc/OWNERS b/tools/tvlc/OWNERS new file mode 100644 index 000000000000..9e7830ab215e --- /dev/null +++ b/tools/tvlc/OWNERS @@ -0,0 +1,3 @@ +inherited: true +owners: + - riking diff --git a/tools/tvlc/common.sh b/tools/tvlc/common.sh new file mode 100644 index 000000000000..fe7605857fd3 --- /dev/null +++ b/tools/tvlc/common.sh @@ -0,0 +1,33 @@ +#!/bin/bash + +set -eu +set -o pipefail + +source path-scripts + +XDG_DATA_HOME="${XDG_DATA_HOME:-$HOME/.local/share}" +tvlc_root="$XDG_DATA_HOME/tvlc" + +nice_checkout_root= +if [ -f "$tvlc_root"/nice_checkout_root ]; then + nice_checkout_root="$(cat "$tvlc_root"/nice_checkout_root)" +fi +nice_checkout_root="${nice_checkout_root:-$HOME/tvlc}" + +depot_root= +if [ -f "$tvlc_root/depot_root" ]; then + depot_root="$(cat "$tvlc_root/depot_root")" +fi +if [ -d /depot ]; then + # don't require config on tvl nixos servers + depot_root="${depot_root:-/depot}" +fi +if [ -n "$depot_root" ]; then + export DEPOT_ROOT="$depot_root" +fi + +if [ ! 
-d "$tvlc_root" ]; then + echo "tvlc: setup required" + echo "please run 'tvlc setup' from the depot root" + exit 1 +fi diff --git a/tools/tvlc/default.nix b/tools/tvlc/default.nix new file mode 100644 index 000000000000..f40f30a44e33 --- /dev/null +++ b/tools/tvlc/default.nix @@ -0,0 +1,50 @@ +{ pkgs, depot, ... }: + +let + pathScripts = pkgs.writeShellScript "imports" '' + export tvix_instantiate="${depot.third_party.nix}/bin/nix-instantiate" + export depot_scanner="${depot.tools.depot-scanner}/bin/depot-scanner" + ''; + + # setup: git rev-parse --show-toplevel > $tvlc_root/depot_root + # setup: mkdir $tvlc_root/clients + # setup: echo 1 > $tvlc_root/next_clientid + + commonsh = pkgs.stdenv.mkDerivation { + name = "common.sh"; + src = ./common.sh; + doCheck = true; + unpackPhase = "true"; + buildPhase = '' + substitute ${./common.sh} $out --replace path-scripts ${pathScripts} + ''; + checkPhase = '' + ${pkgs.shellcheck}/bin/shellcheck $out ${pathScripts} && echo "SHELLCHECK OK" + ''; + installPhase = '' + chmod +x $out + ''; + }; + + tvlcNew = pkgs.stdenv.mkDerivation { + name = "tvlc-new"; + src = ./tvlc-new; + doCheck = true; + + unpackPhase = "true"; + buildPhase = '' + substitute ${./tvlc-new} $out --replace common.sh ${commonsh} + ''; + checkPhase = '' + ${pkgs.shellcheck}/bin/shellcheck $out ${commonsh} ${pathScripts} && echo "SHELLCHECK OK" + ''; + installPhase = '' + chmod +x $out + ''; + }; + +in { + inherit pathScripts; + inherit commonsh; + inherit tvlcNew; +} diff --git a/tools/tvlc/tvlc-new b/tools/tvlc/tvlc-new new file mode 100755 index 000000000000..4ef0df5d33b2 --- /dev/null +++ b/tools/tvlc/tvlc-new @@ -0,0 +1,103 @@ +#!/bin/bash + +source common.sh + +set -eu +set -o pipefail + +function usage() { + echo "tvlc new [-n|--name CLIENTNAME] [derivation...]" + echo "" + cat <<EOF + The 'new' command creates a new git sparse checkout with the given name, and + contents needed to build the Nix derivation(s) specified on the command line. + + Options: + -n/--name client-name: Sets the git branch and nice checkout name for the + workspace. If the option is not provided, the name will be based on the + first non-option command-line argument. + --branch branch-name: Sets the git branch name only. +EOF +} + +checkout_name= +branch_name= + +options=$(getopt -o 'n:' --long debug --long name: -- "$@") +eval set -- "$options" +while true; do + case "$1" in + -h) + usage + exit 0 + ;; + -v) + version + exit 0 + ;; + -n|--name) + shift + checkout_name="$1" + if [ -z "$branch_name" ]; then + branch_name=tvlc-"$1" + fi + ;; + --branch) + shift + branch_name="$1" + ;; + --) + shift + break + ;; + esac + shift +done + +if [ $# -eq 0 ]; then + echo "error: workspace name, target derivations required" + exit 1 +fi + +if [ -z "$checkout_name" ]; then + # TODO(riking): deduce + echo "error: workspace name (-n) required" + exit 1 +fi + +if [ -d "$nice_checkout_root/$checkout_name" ]; then + echo "error: checkout $checkout_name already exists" + # nb: shellescape checkout_name because we expect the user to copy-paste it + # shellcheck disable=SC1003 + echo "consider deleting it with tvlc remove '${checkout_name/'/\'}'" + exit 1 +fi +if [ -f "$DEPOT_ROOT/.git/refs/heads/$branch_name" ]; then + echo "error: branch $branch_name already exists in git" + # shellcheck disable=SC1003 + echo "consider deleting it with cd $DEPOT_ROOT; git branch -d '${checkout_name/'/\'}'" + exit 1 +fi + +# The big one: call into Nix to figure out what paths the desired derivations depend on. 
+readarray -t includedPaths < <("$depot_scanner" --mode 'print' --only 'DEPOT' --relpath --depot "$DEPOT_ROOT" --nix-bin "$tvix_instantiate" "$@") + +# bash math +checkout_id=$(("$(cat "$tvlc_root/next_clientid")")) +next_checkout_id=$(("$checkout_id"+1)) +echo "$next_checkout_id" > "$tvlc_root/next_clientid" + +checkout_dir="$tvlc_root/clients/$checkout_id" +mkdir "$checkout_dir" +cd "$DEPOT_ROOT" +git worktree add --no-checkout -b "$branch_name" "$checkout_dir" +# BUG: git not creating the /info/ subdir +mkdir "$DEPOT_ROOT/.git/worktrees/$checkout_id/info" + +cd "$checkout_dir" +git sparse-checkout init --cone +git sparse-checkout set "${includedPaths[@]}" + +ln -s "$checkout_dir" "$nice_checkout_root"/"$checkout_name" + +echo "$nice_checkout_root/$checkout_name" diff --git a/tools/url-blocker/.envrc b/tools/url-blocker/.envrc deleted file mode 100644 index a4a62da526d3..000000000000 --- a/tools/url-blocker/.envrc +++ /dev/null @@ -1,2 +0,0 @@ -source_up -use_nix diff --git a/tools/url-blocker/Main.hs b/tools/url-blocker/Main.hs deleted file mode 100644 index 926412ce91f9..000000000000 --- a/tools/url-blocker/Main.hs +++ /dev/null @@ -1,205 +0,0 @@ -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE NamedFieldPuns #-} -{-# LANGUAGE DeriveGeneric #-} -module Main ( main ) where - --------------------------------------------------------------------------------- --- Dependencies --------------------------------------------------------------------------------- - -import qualified Data.Maybe as Maybe -import qualified Data.Time.Clock as Clock -import qualified Data.Time.Calendar as Calendar -import qualified Data.Time.LocalTime as LocalTime -import qualified Data.ByteString.Lazy as LazyByteString -import qualified Data.Aeson as Aeson -import qualified Data.Either.Combinators as Either -import qualified Data.HashMap.Strict as HashMap -import qualified Data.Text as Text -import qualified Data.Text.IO as TextIO -import qualified Data.Text.Read as TextRead -import qualified Data.List as List - -import GHC.Generics -import Data.Aeson ((.:)) -import Data.Text (Text) - --------------------------------------------------------------------------------- --- Types --------------------------------------------------------------------------------- - -newtype URL = URL { getURL :: Text } deriving (Show, Eq, Generic) - -newtype IPAddress = IPAddress { getIPAddress :: Text } deriving (Show) - -newtype Domain = Domain { getDomain :: Text } deriving (Show) - -newtype Hour = Hour { getHour :: Int } deriving (Show, Eq, Generic) - -newtype Minute = Minute { getMinute :: Int } deriving (Show, Eq, Generic) - -data EtcHostsEntry = EtcHostsEntry { ip :: IPAddress - , domains :: [Domain] - } deriving (Show) - --- | Write these in terms of your system's local time (i.e. `date`). 
-data TimeSlot = TimeSlot { beg :: (Hour, Minute) - , end :: (Hour, Minute) - } deriving (Show, Eq, Generic) - -data Allowance = Allowance { day :: Calendar.DayOfWeek - , timeslots :: [TimeSlot] - } deriving (Show, Eq, Generic) - -data Rule = Rule { urls :: [URL] - , allowed :: [Allowance] - } deriving (Show, Eq, Generic) - --------------------------------------------------------------------------------- --- Instances --------------------------------------------------------------------------------- - -instance Aeson.FromJSON TimeSlot where - parseJSON = Aeson.withText "timeslot" $ \x -> do - let [a, b] = Text.splitOn "-" x - [ah, am] = Text.splitOn ":" a - [bh, bm] = Text.splitOn ":" b - case extractTimeSlot ah am bh bm of - Left s -> fail s - Right x -> pure x - where - extractTimeSlot :: Text -> Text -> Text -> Text -> Either String TimeSlot - extractTimeSlot ah am bh bm = do - (begh, _) <- TextRead.decimal ah - (begm, _) <- TextRead.decimal am - (endh, _) <- TextRead.decimal bh - (endm, _) <- TextRead.decimal bm - pure $ TimeSlot{ beg = (Hour begh, Minute begm) - , end = (Hour endh, Minute endm) - } - -instance Aeson.FromJSON Allowance where - parseJSON = Aeson.withObject "allowance" $ \x -> do - day <- x .: "day" - timeslots <- x .: "timeslots" - pure $ Allowance{day, timeslots} - -instance Aeson.FromJSON URL where - parseJSON = Aeson.withText "URL" $ \x -> do - pure $ URL { getURL = x } - -instance Aeson.FromJSON Rule where - parseJSON = Aeson.withObject "rule" $ \x -> do - urls <- x .: "urls" - allowed <- x .: "allowed" - pure Rule{urls, allowed} - --------------------------------------------------------------------------------- --- Functions --------------------------------------------------------------------------------- - --- | Pipe operator -(|>) :: a -> (a -> b) -> b -(|>) a f = f a -infixl 1 |> - --- | Returns True if the current time falls within any of the `timeslots`. -isWithinTimeSlot :: LocalTime.LocalTime -> [TimeSlot] -> Bool -isWithinTimeSlot date timeslots = - List.any withinTimeSlot timeslots - where - withinTimeSlot :: TimeSlot -> Bool - withinTimeSlot TimeSlot{ beg = (Hour ah, Minute am) - , end = (Hour bh, Minute bm) - } = - let LocalTime.TimeOfDay{LocalTime.todHour, LocalTime.todMin} = - LocalTime.localTimeOfDay date - in (todHour > ah) && (todMin > am) && (todHour < bh) && (todMin < bm) - --- | Returns True if `day` is the same day as today. -isToday :: LocalTime.LocalTime -> Calendar.DayOfWeek -> Bool -isToday date day = today == day - where - today = Calendar.dayOfWeek (LocalTime.localDay date) - --- | Returns True if a list of none of the `allowances` are valid. -shouldBeBlocked :: LocalTime.LocalTime -> [Allowance] -> Bool -shouldBeBlocked _ [] = True -shouldBeBlocked date allowances = do - case filter (isToday date . day) allowances of - [Allowance{timeslots}] -> not $ isWithinTimeSlot date timeslots - [] -> True - -- Error when more than one rule per day - _ -> True - --- | Maps an EtcHostsEntry to the line of text url-blocker will append to /etc/hosts. -serializeEtcHostEntry :: EtcHostsEntry -> Text -serializeEtcHostEntry EtcHostsEntry{ip, domains} = - (getIPAddress ip) <> "\t" <> (Text.unwords $ fmap getDomain domains) - --- | Create an EtcHostsEntry mapping the URLs in `rule` to 127.0.0.1 if the --- URLs should be blocked. -maybeBlockURL :: LocalTime.LocalTime -> Rule -> Maybe EtcHostsEntry -maybeBlockURL date Rule{urls, allowed} = - if shouldBeBlocked date allowed then - Just $ EtcHostsEntry { ip = IPAddress "127.0.0.1" - , domains = fmap (Domain . 
diff --git a/tools/url-blocker/README.md b/tools/url-blocker/README.md
deleted file mode 100644
index 1b7fea8c15e0..000000000000
--- a/tools/url-blocker/README.md
+++ /dev/null
@@ -1,47 +0,0 @@
-# url-blocker
-
-`url-blocker` blocks the URLs that you want to block when you want it to block
-them.
-
-Let's say that you don't want to visit Twitter during the work week. Create the
-file `~/.config/url-blocker/rules.json` with the following contents and
-`url-blocker` will take care of the rest.
-
-```json
-[
-  {
-    "urls": [
-      "twitter.com",
-      "www.twitter.com"
-    ],
-    "allowed": [
-      {
-        "day": "Saturday",
-        "timeslots": [
-          "00:00-11:59"
-        ]
-      },
-      {
-        "day": "Sunday",
-        "timeslots": [
-          "00:00-11:59"
-        ]
-      }
-    ]
-  }
-]
-```
-
-## Installation
-
-```shell
-$ nix-env -iA 'briefcase.tools.url-blocker'
-```
-
-## How does it work?
-
-`systemd` is intended to run `url-blocker` once every minute. `url-blocker`
-reads `/etc/hosts` and maps the URLs defined in `rules.json` to `127.0.0.1`
-when you want them blocked. Because `systemd` runs it once every minute,
-`/etc/hosts` should be current to the minute as well.
diff --git a/tools/url-blocker/default.nix b/tools/url-blocker/default.nix
deleted file mode 100644
index 943644e5f542..000000000000
--- a/tools/url-blocker/default.nix
+++ /dev/null
@@ -1,33 +0,0 @@
-{ pkgs, ... }:
-
-let
-  ghc = pkgs.haskellPackages.ghcWithPackages (hpkgs: [
-    hpkgs.time
-    hpkgs.aeson
-    hpkgs.either
-  ]);
-
-  # Builds the url-blocker binary that the systemd service should run.
-  service = pkgs.stdenv.mkDerivation {
-    name = "url-blocker";
-    src = builtins.path { path = ./.; name = "url-blocker"; };
-    buildPhase = ''
-      ${ghc}/bin/ghc Main.hs
-    '';
-    installPhase = ''
-      mv ./Main $out
-    '';
-  };
-
-  # The systemd timer unit: run once every minute, with root privileges.
-  # NOTE: nothing references this binding; the file evaluates to null.
-  systemdUnit = {
-    systemd = {
-      timers.simple-timer = {
-        wantedBy = [ "timers.target" ];
-        partOf = [];
-      };
-    };
-  };
-in null
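The three `TODO(wpcarro)` comments above `getRules` in the deleted `Main.hs` (handle file-not-found, handle parse failures, resolve `$HOME` when run as root) are all mechanical to address. Here is a minimal sketch under those assumptions, using the `directory` and `filepath` packages; it decodes to `Aeson.Value` only to stay self-contained, where the real program would decode to `[Rule]`. The name `getRules` is reused for clarity, but this is not the deleted implementation.

```haskell
{-# LANGUAGE ScopedTypeVariables #-}

import Control.Exception (IOException, try)
import qualified Data.Aeson as Aeson
import qualified Data.ByteString.Lazy as LazyByteString
import System.Directory (getHomeDirectory)
import System.FilePath ((</>))

-- | Read and parse rules.json, surfacing both failure modes instead of
-- silently falling back to an empty rule set as the deleted code did.
getRules :: IO (Either String Aeson.Value)
getRules = do
  -- getHomeDirectory consults $HOME, so when run as root this resolves to
  -- root's home directory; that is exactly the open question in the TODO.
  home <- getHomeDirectory
  let path = home </> ".config" </> "url-blocker" </> "rules.json"
  result <- try (LazyByteString.readFile path)
  pure $ case result of
    Left (e :: IOException) -> Left ("cannot read " <> path <> ": " <> show e)
    Right contents          -> Aeson.eitherDecode contents

main :: IO ()
main = getRules >>= either putStrLn print
```

Returning `Either String` pushes the decision of what a failure means (skip the run, keep the previous /etc/hosts entries, log and exit) up to the caller rather than collapsing it to `[]`.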
diff --git a/tools/url-blocker/rules.json b/tools/url-blocker/rules.json
deleted file mode 100644
index 95e4dc9a90c1..000000000000
--- a/tools/url-blocker/rules.json
+++ /dev/null
@@ -1,28 +0,0 @@
-[
-  {
-    "urls": [
-      "facebook.com",
-      "www.facebook.com",
-      "twitter.com",
-      "www.twitter.com",
-      "youtube.com",
-      "www.youtube.com",
-      "instagram.com",
-      "www.instagram.com"
-    ],
-    "allowed": []
-  },
-  {
-    "urls": [
-      "chat.googleplex.com"
-    ],
-    "allowed": [
-      {
-        "day": "Sunday",
-        "timeslots": [
-          "18:35-18:39"
-        ]
-      }
-    ]
-  }
-]
diff --git a/tools/url-blocker/shell.nix b/tools/url-blocker/shell.nix
deleted file mode 100644
index 1adc566c0121..000000000000
--- a/tools/url-blocker/shell.nix
+++ /dev/null
@@ -1,10 +0,0 @@
-let
-  briefcase = import <briefcase> {};
-in briefcase.buildHaskell.shell {
-  deps = hpkgs: with hpkgs; [
-    time
-    aeson
-    either
-    hspec
-  ];
-}
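A final note on the timeslot strings above (`"00:00-11:59"`, `"18:35-18:39"`): the deleted `FromJSON TimeSlot` instance bound them with refutable `let` list patterns, so a malformed string crashed the parser with a pattern-match error instead of producing a parse failure. Below is a minimal standalone sketch of a total parser; `parseTimeSlot` is a hypothetical name, the function could back `Aeson.withText "timeslot"`, and it is easy to cover with the `hspec` dependency that shell.nix already pulled in.

```haskell
{-# LANGUAGE OverloadedStrings #-}

import Data.Text (Text)
import qualified Data.Text as Text
import qualified Data.Text.Read as TextRead

-- | Parse "HH:MM-HH:MM" into ((hour, minute), (hour, minute)), reporting
-- malformed input as a Left instead of crashing on a refutable pattern.
parseTimeSlot :: Text -> Either String ((Int, Int), (Int, Int))
parseTimeSlot x =
  case Text.splitOn "-" x of
    [a, b] -> (,) <$> parseHM a <*> parseHM b
    _      -> Left ("expected HH:MM-HH:MM, got: " <> Text.unpack x)
  where
    parseHM t =
      case Text.splitOn ":" t of
        [h, m] -> do
          (h', hRest) <- TextRead.decimal h
          (m', mRest) <- TextRead.decimal m
          if Text.null hRest && Text.null mRest && h' < 24 && m' < 60
            then Right (h', m')
            else Left ("invalid time of day: " <> Text.unpack t)
        _ -> Left ("expected HH:MM, got: " <> Text.unpack t)

main :: IO ()
main = do
  print (parseTimeSlot "18:35-18:39")  -- Right ((18,35),(18,39))
  print (parseTimeSlot "0000-1159")    -- Left "expected HH:MM, got: 0000"
```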