Diffstat (limited to 'users/Profpatsch')
32 files changed, 2988 insertions, 0 deletions
diff --git a/users/Profpatsch/OWNERS b/users/Profpatsch/OWNERS
new file mode 100644
index 000000000000..5a73d4c3a1fd
--- /dev/null
+++ b/users/Profpatsch/OWNERS
@@ -0,0 +1,4 @@
+inherited: false
+owners:
+  - Profpatsch
+  - sterni
diff --git a/users/Profpatsch/advent-of-code/2020/01/main.py b/users/Profpatsch/advent-of-code/2020/01/main.py
new file mode 100644
index 000000000000..e636017a54d5
--- /dev/null
+++ b/users/Profpatsch/advent-of-code/2020/01/main.py
@@ -0,0 +1,22 @@
+import sys
+
+l = []
+with open('./input', 'r') as f:
+    for line in f:
+        l.append(int(line))
+
+s = set(l)
+
+res=None
+for el in s:
+    for el2 in s:
+        if (2020-(el+el2)) in s:
+            res=(el, el2, 2020-(el+el2))
+            break
+
+if res is None:
+    sys.exit("could not find a number that adds to 2020")
+
+print(res)
+
+print(res[0] * res[1] * res[2])
diff --git a/users/Profpatsch/advent-of-code/2020/02/main.py b/users/Profpatsch/advent-of-code/2020/02/main.py
new file mode 100644
index 000000000000..e3b27c382a21
--- /dev/null
+++ b/users/Profpatsch/advent-of-code/2020/02/main.py
@@ -0,0 +1,77 @@
+import sys
+
+def parse(line):
+    a = line.split(sep=" ", maxsplit=1)
+    assert len(a) == 2
+    fromto = a[0].split(sep="-")
+    assert len(fromto) == 2
+    (from_, to) = (int(fromto[0]), int(fromto[1]))
+    charpass = a[1].split(sep=": ")
+    assert len(charpass) == 2
+    char = charpass[0]
+    assert len(char) == 1
+    pass_ = charpass[1]
+    assert pass_.endswith("\n")
+    pass_ = pass_[:-1]
+    return {
+        "from": from_,
+        "to": to,
+        "char": char,
+        "pass": pass_
+    }
+
+def char_in_pass(char, pass_):
+    return pass_.count(char)
+
+def validate_01(entry):
+    no = char_in_pass(entry["char"], entry["pass"])
+    if no < entry["from"]:
+        return { "too-small": entry }
+    elif no > entry["to"]:
+        return { "too-big": entry }
+    else:
+        return { "ok": entry }
+
+def char_at_pos(char, pos, pass_):
+    assert pos <= len(pass_)
+    return pass_[pos-1] == char
+
+def validate_02(entry):
+    one = char_at_pos(entry["char"], entry["from"], entry["pass"])
+    two = char_at_pos(entry["char"], entry["to"], entry["pass"])
+    if one and two:
+        return { "both": entry }
+    elif one:
+        return { "one": entry }
+    elif two:
+        return { "two": entry }
+    else:
+        return { "none": entry }
+
+
+res01 = []
+res02 = []
+with open("./input", 'r') as f:
+    for line in f:
+        p = parse(line)
+        res01.append(validate_01(p))
+        res02.append(validate_02(p))
+
+count01=0
+for r in res01:
+    print(r)
+    if r.get("ok", False):
+        count01=count01+1
+
+count02=0
+for r in res02:
+    print(r)
+    if r.get("one", False):
+        count02=count02+1
+    elif r.get("two", False):
+        count02=count02+1
+    else:
+        pass
+
+print("count 1: {}".format(count01))
+print("count 2: {}".format(count02))
diff --git a/users/Profpatsch/advent-of-code/2020/03/main.py b/users/Profpatsch/advent-of-code/2020/03/main.py
new file mode 100644
index 000000000000..4d6baf946c3e
--- /dev/null
+++ b/users/Profpatsch/advent-of-code/2020/03/main.py
@@ -0,0 +1,66 @@
+import itertools
+import math
+
+def tree_line(init):
+    return {
+        "init-len": len(init),
+        "known": '',
+        "rest": itertools.repeat(init)
+    }
+
+def tree_line_at(pos, tree_line):
+    needed = (pos + 1) - len(tree_line["known"])
+    # internally advance the tree line to the position requested
+    if needed > 0:
+        tree_line["known"] = tree_line["known"] \
+          + ''.join(
+              itertools.islice(
+                  tree_line["rest"],
+                  1+math.floor(needed / tree_line["init-len"])))
+    # print(tree_line)
+    return tree_line["known"][pos] == '#'
+
+def tree_at(linepos, pos, trees):
+    return tree_line_at(pos, trees[linepos])
+
+def slope_positions(trees, right, down):
+    line = 0
+    pos = 0
+    while line < len(trees):
+        yield (line, pos)
+        line = line + down
+        pos = pos + right
+
+trees = []
+with open("./input", 'r') as f:
+    for line in f:
+        line = line.rstrip()
+        trees.append(tree_line(line))
+
+# print(list(itertools.islice(trees[0], 5)))
+# print(list(map(
+#     lambda x: tree_at(0, x, trees),
+#     range(100)
+# )))
+# print(list(slope_positions(trees, right=3, down=1)))
+
+def count_slope_positions(trees, slope):
+    count = 0
+    for (line, pos) in slope:
+        if tree_at(line, pos, trees):
+            count = count + 1
+    return count
+
+print(
+    count_slope_positions(trees, slope_positions(trees, right=1, down=1))
+    *
+    count_slope_positions(trees, slope_positions(trees, right=3, down=1))
+    *
+    count_slope_positions(trees, slope_positions(trees, right=5, down=1))
+    *
+    count_slope_positions(trees, slope_positions(trees, right=7, down=1))
+    *
+    count_slope_positions(trees, slope_positions(trees, right=1, down=2))
+)
+
+# I realized I could have just used a modulo instead …
diff --git a/users/Profpatsch/advent-of-code/2020/04/main.py b/users/Profpatsch/advent-of-code/2020/04/main.py
new file mode 100644
index 000000000000..36bbed7146d6
--- /dev/null
+++ b/users/Profpatsch/advent-of-code/2020/04/main.py
@@ -0,0 +1,104 @@
+import sys
+import itertools
+import re
+import pprint
+
+def get_entry(fd):
+    def to_dict(keyval):
+        res = {}
+        for (k, v) in keyval:
+            assert k not in res
+            res[k] = v
+        return res
+
+    res = []
+    for line in fd:
+        if line == "\n":
+            yield to_dict(res)
+            res = []
+        else:
+            line = line.rstrip()
+            items = line.split(" ")
+            for i in items:
+                res.append(i.split(":", maxsplit=2))
+
+def val_hgt(hgt):
+    m = re.fullmatch(r'([0-9]+)(cm|in)', hgt)
+    if m:
+        (i, what) = m.group(1,2)
+        i = int(i)
+        if what == "cm":
+            return i >= 150 and i <= 193
+        elif what == "in":
+            return i >= 59 and i <= 76
+        else:
+            return False
+
+required_fields = [
+    { "name": "byr",
+      "check": lambda s: int(s) >= 1920 and int(s) <= 2002
+    },
+    { "name": "iyr",
+      "check": lambda s: int(s) >= 2010 and int(s) <= 2020
+    },
+    { "name": "eyr",
+      "check": lambda s: int(s) >= 2020 and int(s) <= 2030,
+    },
+    { "name": "hgt",
+      "check": lambda s: val_hgt(s)
+    },
+    { "name": "hcl",
+      "check": lambda s: re.fullmatch(r'#[0-9a-f]{6}', s)
+    },
+    { "name": "ecl",
+      "check": lambda s: re.fullmatch(r'amb|blu|brn|gry|grn|hzl|oth', s)
+    },
+    { "name": "pid",
+      "check": lambda s: re.fullmatch(r'[0-9]{9}', s)
+    },
+    # we should treat it as not required
+    # "cid"
+]
+
+required_dict = {}
+for f in required_fields:
+    required_dict[f["name"]] = f
+
+def validate(keyval):
+    if keyval[0] not in required_dict:
+        return { "ok": keyval }
+    if required_dict[keyval[0]]["check"](keyval[1]):
+        return { "ok": keyval }
+    else:
+        return { "validation": keyval }
+
+def all_fields(entry):
+    missing = []
+    for r in required_dict:
+        if r not in entry:
+            missing.append(r)
+    if missing == []:
+        return { "ok": entry }
+    else:
+        return { "missing": missing }
+
+count=0
+for e in get_entry(sys.stdin):
+    a = all_fields(e)
+    if a.get("ok", False):
+        res = {}
+        bad = False
+        for keyval in e.items():
+            r = validate(keyval)
+            if r.get("validation", False):
+                bad = True
+            res[keyval[0]] = r
+        if bad:
+            pprint.pprint({ "validation": res })
+        else:
+            pprint.pprint({ "ok": e })
+            count = count+1
+    else:
+        pprint.pprint(a)
+
+print(count)
diff --git a/users/Profpatsch/arglib/default.nix b/users/Profpatsch/arglib/default.nix
new file mode 100644
index 000000000000..b263654ac3e0
--- /dev/null
+++ b/users/Profpatsch/arglib/default.nix
@@ -0,0 +1,44 @@
+{ depot, pkgs, lib, ... }:
+
+let
+  netencode = {
+    rust = depot.users.Profpatsch.writers.rustSimpleLib {
+      name = "arglib-netencode";
+      dependencies = [
+        depot.users.Profpatsch.execline.exec-helpers
+        depot.users.Profpatsch.netencode.netencode-rs
+      ];
+    } ''
+      extern crate netencode;
+      extern crate exec_helpers;
+
+      use netencode::{T};
+      use std::os::unix::ffi::OsStrExt;
+
+      pub fn arglib_netencode(prog_name: &str, env: Option<&std::ffi::OsStr>) -> T {
+          let env = match env {
+              None => std::ffi::OsStr::from_bytes("ARGLIB_NETENCODE".as_bytes()),
+              Some(a) => a
+          };
+          let t = match std::env::var_os(env) {
+              None => exec_helpers::die_user_error(prog_name, format!("could not read args, envvar {} not set", env.to_string_lossy())),
+              // TODO: good error handling for the different parser errors
+              Some(soup) => match netencode::parse::t_t(soup.as_bytes()) {
+                  Ok((remainder, t)) => match remainder.is_empty() {
+                      true => t,
+                      false => exec_helpers::die_environment_problem(prog_name, format!("arglib: there were some unparsed bytes remaining: {:?}", remainder))
+                  },
+                  Err(err) => exec_helpers::die_environment_problem(prog_name, format!("arglib parsing error: {:?}", err))
+              }
+          };
+          std::env::remove_var(env);
+          t
+      }
+    '';
+  };
+
+in {
+  inherit
+    netencode
+    ;
+}
diff --git a/users/Profpatsch/emacs-tree-sitter-move/default.nix b/users/Profpatsch/emacs-tree-sitter-move/default.nix
new file mode 100644
index 000000000000..fdc059c089b6
--- /dev/null
+++ b/users/Profpatsch/emacs-tree-sitter-move/default.nix
@@ -0,0 +1,3 @@
+# nothing yet (TODO: expose shell & tool)
+{...}:
+{}
diff --git a/users/Profpatsch/emacs-tree-sitter-move/shell.nix b/users/Profpatsch/emacs-tree-sitter-move/shell.nix
new file mode 100644
index 000000000000..81d622ac73e5
--- /dev/null
+++ b/users/Profpatsch/emacs-tree-sitter-move/shell.nix
@@ -0,0 +1,16 @@
+{ pkgs ? import ../../../third_party {}, ... 
}: +let + inherit (pkgs) lib; + + treeSitterGrammars = pkgs.runCommandLocal "grammars" {} '' + mkdir -p $out/bin + ${lib.concatStringsSep "\n" + (lib.mapAttrsToList (name: src: "ln -s ${src}/parser $out/bin/${name}.so") pkgs.tree-sitter.builtGrammars)}; + ''; + +in pkgs.mkShell { + buildInputs = [ + pkgs.tree-sitter.builtGrammars.python + ]; + TREE_SITTER_GRAMMAR_DIR = treeSitterGrammars; +} diff --git a/users/Profpatsch/emacs-tree-sitter-move/test.json b/users/Profpatsch/emacs-tree-sitter-move/test.json new file mode 100644 index 000000000000..d9f8075976d6 --- /dev/null +++ b/users/Profpatsch/emacs-tree-sitter-move/test.json @@ -0,0 +1,14 @@ +{ + "foo": { + "x": [ 1, 2, 3, 4 ], + "bar": "test" + }, + "foo": { + "x": [ 1, 2, 3, 4 ], + "bar": "test" + }, + "foo": { + "x": [ 1, 2, 3, 4 ], + "bar": "test" + } +} diff --git a/users/Profpatsch/emacs-tree-sitter-move/test.py b/users/Profpatsch/emacs-tree-sitter-move/test.py new file mode 100644 index 000000000000..0f57bae035da --- /dev/null +++ b/users/Profpatsch/emacs-tree-sitter-move/test.py @@ -0,0 +1,13 @@ +(4 + 5 + 5) + +def foo(a, b, c) + +def bar(a, b): + 4 + 4 + 4 + +[1, 4, 5, 10] + +def foo(): + pass diff --git a/users/Profpatsch/emacs-tree-sitter-move/test.sh b/users/Profpatsch/emacs-tree-sitter-move/test.sh new file mode 100644 index 000000000000..681081f5909d --- /dev/null +++ b/users/Profpatsch/emacs-tree-sitter-move/test.sh @@ -0,0 +1,14 @@ +function foo () { + local x=123 +} + +function bar () { + local x=123 +} + +echo abc def \ + gef gef + +printf \ + "%s\n" \ + haha diff --git a/users/Profpatsch/emacs-tree-sitter-move/tmp.el b/users/Profpatsch/emacs-tree-sitter-move/tmp.el new file mode 100644 index 000000000000..88d13fa45b81 --- /dev/null +++ b/users/Profpatsch/emacs-tree-sitter-move/tmp.el @@ -0,0 +1,28 @@ +(defun tree-sitter-load-from-grammar-dir (grammar-dir sym lang-name) + (tree-sitter-load + sym + (format "%s/bin/%s" + (getenv grammar-dir) + lang-name))) + +(defun tree-sitter-init-tmp-langs (alist) + (mapcar + (lambda (lang) + (pcase-let ((`(,name ,sym ,mode) lang)) + (tree-sitter-load-from-grammar-dir "TREE_SITTER_GRAMMAR_DIR" sym name) + (cons mode sym))) + alist)) + + +(setq tree-sitter-major-mode-language-alist + (tree-sitter-init-tmp-langs + '(("python" python python-mode) + ("json" json js-mode) + ("bash" bash sh-mode) + ))) + +(define-key evil-normal-state-map (kbd "C-.") #'tree-sitter-move-reset) +(define-key evil-normal-state-map (kbd "C-<right>") #'tree-sitter-move-right) +(define-key evil-normal-state-map (kbd "C-<left>") #'tree-sitter-move-left) +(define-key evil-normal-state-map (kbd "C-<up>") #'tree-sitter-move-up) +(define-key evil-normal-state-map (kbd "C-<down>") #'tree-sitter-move-down) diff --git a/users/Profpatsch/emacs-tree-sitter-move/tree-sitter-move.el b/users/Profpatsch/emacs-tree-sitter-move/tree-sitter-move.el new file mode 100644 index 000000000000..907e1e4081bc --- /dev/null +++ b/users/Profpatsch/emacs-tree-sitter-move/tree-sitter-move.el @@ -0,0 +1,139 @@ +;; this is not an actual cursor, just a node. +;; It’s not super efficient, but cursors can’t be *set* to an arbitrary +;; subnode, because they can’t access the parent otherwise. +;; We’d need a way to reset the cursor and walk down to the node?! +(defvar-local tree-sitter-move--cursor nil + "the buffer-local cursor used for movement") + +(defvar-local tree-sitter-move--debug-overlay nil + "an overlay used to visually display the region currently marked by the cursor") + +;;;;; TODO: should everything use named nodes? Only some things? 
+;;;;; maybe there should be a pair of functions for everything?
+;;;;; For now restrict to named nodes.
+
+(defun tree-sitter-move--setup ()
+  ;; TODO
+  (progn
+    ;; TODO: if tree-sitter-mode fails to load, display a better error
+    (tree-sitter-mode t)
+    (setq tree-sitter-move--cursor (tsc-root-node tree-sitter-tree))
+    (add-variable-watcher
+     'tree-sitter-move--cursor
+     #'tree-sitter-move--debug-overlay-update)))
+
+(defun tree-sitter-move--debug-overlay-update (sym newval &rest _args)
+  "variable-watcher to update the debug overlay when the cursor changes"
+  (let ((start (tsc-node-start-position newval))
+        (end (tsc-node-end-position newval)))
+    (symbol-macrolet ((o tree-sitter-move--debug-overlay))
+      (if o
+          (move-overlay o start end)
+        (setq o (make-overlay start end))
+        (overlay-put o 'face 'highlight)
+        ))))
+
+(defun tree-sitter-move--debug-overlay-teardown ()
+  "Turn off the overlay visibility and delete the overlay object"
+  (when tree-sitter-move--debug-overlay
+    (delete-overlay tree-sitter-move--debug-overlay)
+    (setq tree-sitter-move--debug-overlay nil)))
+
+(defun tree-sitter-move--teardown ()
+  (setq tree-sitter-move--cursor nil)
+  (tree-sitter-move--debug-overlay-teardown)
+  (tree-sitter-mode nil))
+
+;; Get the syntax node the cursor is on.
+(defun tsc-get-named-node-at-point ()
+  (let ((p (point)))
+    (tsc-get-named-descendant-for-position-range
+     (tsc-root-node tree-sitter-tree) p p)))
+
+;; TODO: is this function necessary?
+;; Maybe tree-sitter always guarantees that parents are named?
+(defun tsc-get-named-parent (node)
+  (when-let ((parent (tsc-get-parent node)))
+    (while (and parent (not (tsc-node-named-p parent)))
+      (setq parent (tsc-get-parent parent)))
+    parent))
+
+(defun tsc-get-first-named-node-with-siblings-up (node)
+  "Returns the first 'upwards' node that has siblings. That includes the current
+   node, so if the given node has siblings, it is returned. Returns nil if there
+   is no such node up to the root."
+  (when-let ((has-siblings-p
+              (lambda (parent-node)
+                (> (tsc-count-named-children parent-node)
+                   1)))
+             (cur node)
+             (parent (tsc-get-named-parent node)))
+    (while (and parent (not (funcall has-siblings-p parent)))
+      (setq cur parent)
+      (setq parent (tsc-get-named-parent cur)))
+    cur))
+
+(defun tree-sitter-move--set-cursor-to-node (node)
+  (setq tree-sitter-move--cursor node))
+
+(defun tree-sitter-move--set-cursor-to-node-at-point ()
+  (tree-sitter-move--set-cursor-to-node (tsc-get-named-node-at-point)))
+
+(defun tree-sitter-move--move-point-to-node (node)
+  (set-window-point
+    (selected-window)
+    (tsc-node-start-position node)))
+
+
+;; interactive commands (“do what I expect” section)
+
+(defun tree-sitter-move-reset ()
+  (interactive)
+  (tree-sitter-move--set-cursor-to-node-at-point))
+
+(defun tree-sitter-move-right ()
+  (interactive)
+  (tree-sitter-move--move-skip-non-sibling-nodes 'tsc-get-next-named-sibling))
+
+(defun tree-sitter-move-left ()
+  (interactive)
+  (tree-sitter-move--move-skip-non-sibling-nodes 'tsc-get-prev-named-sibling))
+
+(defun tree-sitter-move-up ()
+  (interactive)
+  (tree-sitter-move--move-skip-non-sibling-nodes 'tsc-get-parent))
+
+;; TODO: does not skip siblings yet, because the skip function only goes up (not down)
+(defun tree-sitter-move-down ()
+  (interactive)
+  (tree-sitter-move--move-if-possible (lambda (n) (tsc-get-nth-named-child n 0))))
+
+(defun tree-sitter-move--move-skip-non-sibling-nodes (move-fn)
+  "Move sideways to the next sibling. 
If the current node does not have siblings, go + upwards until something has siblings and then move to the side (right or left)." + (tree-sitter-move--move-if-possible + (lambda (cur) + (when-let ((with-siblings + (tsc-get-first-named-node-with-siblings-up cur))) + (funcall move-fn with-siblings))))) + +(defun tree-sitter-move--move-if-possible (dir-fn) + (let ((next (funcall dir-fn tree-sitter-move--cursor))) + (when next + (tree-sitter-move--set-cursor-to-node next) + (tree-sitter-move--move-point-to-node next)))) + +; mostly stolen from tree-sitter-mode +;;;###autoload +(define-minor-mode tree-sitter-move-mode + "Minor mode to do cursor movements via tree-sitter" + :init-value nil + :lighter " tree-sitter-move" + (if tree-sitter-move-mode + (tree-sitter--error-protect + (progn + (tree-sitter-move--setup)) + (setq tree-sitter-move-mode nil) + (tree-sitter-move--teardown)) + (lambda ()) + (tree-sitter-move--teardown))) diff --git a/users/Profpatsch/execline/default.nix b/users/Profpatsch/execline/default.nix new file mode 100644 index 000000000000..852fcfcfa005 --- /dev/null +++ b/users/Profpatsch/execline/default.nix @@ -0,0 +1,12 @@ +{ depot, pkgs, lib, ... }: + +let + exec-helpers = depot.users.Profpatsch.writers.rustSimpleLib { + name = "exec-helpers"; + } (builtins.readFile ./exec_helpers.rs); + +in { + inherit + exec-helpers + ; +} diff --git a/users/Profpatsch/execline/exec_helpers.rs b/users/Profpatsch/execline/exec_helpers.rs new file mode 100644 index 000000000000..b9e1f5797386 --- /dev/null +++ b/users/Profpatsch/execline/exec_helpers.rs @@ -0,0 +1,113 @@ +use std::os::unix::process::CommandExt; +use std::ffi::OsStr; +use std::os::unix::ffi::{OsStringExt, OsStrExt}; + +pub fn no_args(current_prog_name: &str) -> () { + let mut args = std::env::args_os(); + // remove argv[0] + let _ = args.nth(0); + if args.len() > 0 { + die_user_error(current_prog_name, format!("Expected no arguments, got {:?}", args.collect::<Vec<_>>())) + } +} + +pub fn args(current_prog_name: &str, no_of_positional_args: usize) -> Vec<Vec<u8>> { + let mut args = std::env::args_os(); + // remove argv[0] + let _ = args.nth(0); + if args.len() != no_of_positional_args { + die_user_error(current_prog_name, format!("Expected {} arguments, got {}, namely {:?}", no_of_positional_args, args.len(), args.collect::<Vec<_>>())) + } + args.map(|arg| arg.into_vec()).collect() +} + +pub fn args_for_exec(current_prog_name: &str, no_of_positional_args: usize) -> (Vec<Vec<u8>>, Vec<Vec<u8>>) { + let mut args = std::env::args_os(); + // remove argv[0] + let _ = args.nth(0); + let mut args = args.map(|arg| arg.into_vec()); + let mut pos_args = vec![]; + // get positional args + for i in 1..no_of_positional_args+1 { + pos_args.push( + args.nth(0).expect( + &format!("{}: expects {} positional args, only got {}", current_prog_name, no_of_positional_args, i)) + ); + } + // prog... is the rest of the iterator + let prog : Vec<Vec<u8>> = args.collect(); + (pos_args, prog) +} + +pub fn exec_into_args<'a, 'b, Args, Arg, Env, Key, Val>(current_prog_name: &str, args: Args, env_additions: Env) -> ! + where + Args: IntoIterator<Item = Arg>, + Arg: AsRef<[u8]>, + Env: IntoIterator<Item = (Key, Val)>, + Key: AsRef<[u8]>, + Val: AsRef<[u8]>, +{ + // TODO: is this possible without collecting into a Vec first, just leaving it an IntoIterator? 
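+    // (collecting first gives the `Arg` values a stable owner: the `OsStr`s built
+    // below borrow from each element, so the elements have to stay alive in the
+    // `Vec` until the final `exec` call)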
+    let args = args.into_iter().collect::<Vec<Arg>>();
+    let mut args = args.iter().map(|v| OsStr::from_bytes(v.as_ref()));
+    let prog = args.nth(0).expect(&format!("{}: first argument must be an executable", current_prog_name));
+    // TODO: same here
+    let env = env_additions.into_iter().collect::<Vec<(Key, Val)>>();
+    let env = env.iter().map(|(k,v)| (OsStr::from_bytes(k.as_ref()), OsStr::from_bytes(v.as_ref())));
+    let err = std::process::Command::new(prog).args(args).envs(env).exec();
+    die_missing_executable(current_prog_name, format!("exec failed: {}, while trying to exec into {:?}", err, prog));
+}
+
+/// Exit 1 to signify a generic expected error
+/// (e.g. something that sometimes just goes wrong, like a nix build).
+pub fn die_expected_error<S>(current_prog_name: &str, msg: S) -> !
+where S: AsRef<str>
+{
+    die_with(1, current_prog_name, msg)
+}
+
+/// Exit 100 to signify a user error (“the user is holding it wrong”).
+/// This is a permanent error: if the program is executed the same way,
+/// it will crash with 100 again.
+pub fn die_user_error<S>(current_prog_name: &str, msg: S) -> !
+where S: AsRef<str>
+{
+    die_with(100, current_prog_name, msg)
+}
+
+/// Exit 101 to signify an unexpected crash (failing assertion or panic).
+/// This is the same exit code that `panic!()` emits.
+pub fn die_panic<S>(current_prog_name: &str, msg: S) -> !
+where S: AsRef<str>
+{
+    die_with(101, current_prog_name, msg)
+}
+
+/// Exit 111 to signify a temporary error (such as resource exhaustion)
+pub fn die_temporary<S>(current_prog_name: &str, msg: S) -> !
+where S: AsRef<str>
+{
+    die_with(111, current_prog_name, msg)
+}
+
+/// Exit 126 to signify an environment problem
+/// (the user has set up stuff incorrectly so the program cannot work)
+pub fn die_environment_problem<S>(current_prog_name: &str, msg: S) -> !
+where S: AsRef<str>
+{
+    die_with(126, current_prog_name, msg)
+}
+
+/// Exit 127 to signify a missing executable.
+pub fn die_missing_executable<S>(current_prog_name: &str, msg: S) -> !
+where S: AsRef<str>
+{
+    die_with(127, current_prog_name, msg)
+}
+
+fn die_with<S>(status: i32, current_prog_name: &str, msg: S) -> !
+    where S: AsRef<str>
+{
+    eprintln!("{}: {}", current_prog_name, msg.as_ref());
+    std::process::exit(status)
+}
diff --git a/users/Profpatsch/lib.nix b/users/Profpatsch/lib.nix
new file mode 100644
index 000000000000..db81c2dfe321
--- /dev/null
+++ b/users/Profpatsch/lib.nix
@@ -0,0 +1,49 @@
+{ depot, pkgs, ... }:
+let
+  bins = depot.nix.getBins pkgs.coreutils [ "printf" "echo" "cat" "printenv" ]
+      // depot.nix.getBins pkgs.fdtools [ "multitee" ]
+      ;
+
+  debugExec = msg: depot.nix.writeExecline "debug-exec" {} [
+    "if" [
+      "fdmove" "-c" "1" "2"
+      "if" [ bins.printf "%s: " msg ]
+      "if" [ bins.echo "$@" ]
+    ]
+    "$@"
+  ];
+
+  eprintf = depot.nix.writeExecline "eprintf" {} [
+    "fdmove" "-c" "1" "2" bins.printf "$@"
+  ];
+
+  eprint-stdin = depot.nix.writeExecline "eprint-stdin" {} [
+    "pipeline" [ bins.multitee "0-1,2" ] "$@"
+  ];
+
+  eprintenv = depot.nix.writeExecline "eprintenv" { readNArgs = 1; } [
+    "ifelse" [ "fdmove" "-c" "1" "2" bins.printenv "$1" ]
+    [ "$@" ]
+    "if" [ eprintf "eprintenv: could not find \"\${1}\" in the environment\n" ]
+    "$@"
+  ];
+
+  # remove everything but a few selected environment variables
+  runInEmptyEnv = keepVars:
+    let
+      importas = pkgs.lib.concatMap (var: [ "importas" "-i" var var ]) keepVars;
+      # we have to explicitly call export here, because PATH is probably empty
+      export = pkgs.lib.concatMap (var: [ "${pkgs.execline}/bin/export" var ''''${${var}}'' ]) keepVars;
+    in depot.nix.writeExecline "empty-env" {}
+         (importas ++ [ "emptyenv" ] ++ export ++ [ "${pkgs.execline}/bin/exec" "$@" ]);
+
+
+in {
+  inherit
+    debugExec
+    eprintf
+    eprint-stdin
+    eprintenv
+    runInEmptyEnv
+    ;
+}
diff --git a/users/Profpatsch/netencode/README.md b/users/Profpatsch/netencode/README.md
new file mode 100644
index 000000000000..3058e36eaf5c
--- /dev/null
+++ b/users/Profpatsch/netencode/README.md
@@ -0,0 +1,111 @@
+# netencode 0.1-unreleased
+
+[bencode][] and [netstring][]-inspired pipe format that should be trivial to generate correctly in every context (only requires a `byte_length()` and a `printf()`), easy to parse (100 lines of code or less), mostly human-decipherable for easy debugging, and support nested record and sum types.
+
+
+## scalars
+
+Scalars have the format `[type prefix][size]:[value],`.
+
+where size is a natural number without leading zeroes.
+
+### unit
+
+The unit (`u`) has only one value.
+
+* The unit is: `u,`
+
+### numbers
+
+Naturals (`n`) and Integers (`i`), with a maximum size in bits.
+
+Bit sizes are specified in 2^n increments, 1 to 9 (`n1`..`n9`, `i1`..`i9`).
+
+* Natural `1234` that fits in 32 bits (2^5): `n5:1234,`
+* Integer `-42` that fits in 8 bits (2^3): `i3:-42,`
+* Integer `23` that fits in 64 bits (2^6): `i6:23,`
+* Integer `-1` that fits in 512 bits (2^9): `i9:-1,`
+* Natural `0` that fits in 1 bit (2^1): `n1:0,`
+
+An implementation can define the biggest numbers it supports, and has to throw an error for anything bigger. It has to support everything smaller, so for example if you support up to i6/n6, you have to support 1–6 as well. An implementation could support up to the current architecture’s wordsize for example.
+
+Floats are not supported; you can implement fixed-size decimals or ratios using integers.
+
+### booleans
+
+A boolean is represented as `n1`.
+
+* `n1:0,`: false
+* `n1:1,`: true
+
+TODO: should we add `f,` and `t,`?
+
+### text
+
+Text (`t`) that *must* be encoded as UTF-8, starting with its length in bytes:
+
+* The string `hello world` (11 bytes): `t11:hello world,`
+* The string `今日は` (9 bytes): `t9:今日は,`
+* The string `:,` (2 bytes): `t2::,,`
+* The empty string `` (0 bytes): `t0:,`
+
+### binary
+
+Arbitrary binary strings (`b`) that can contain any data, starting with its length in bytes.
+
+* The ASCII string `hello world` as binary data (11 bytes): `b11:hello world,`
+* The empty binary string (0 bytes): `b0:,`
+* The bytestring with `^D` (1 byte): `b1:,`
+
+Since the binary strings are length-prefixed, they can contain `\0` and no escaping is required. Care has to be taken in languages with `\0`-terminated bytestrings.
+
+Use text (`t`) if you have UTF-8 encoded data.
+
+## tagged values
+
+### tags
+
+A tag (`<`) gives a value a name. The tag is UTF-8 encoded, starting with its length in bytes and proceeding with the value.
+
+* The tag `foo` (3 bytes) tagging the text `hello` (5 bytes): `<3:foo|t5:hello,`
+* The tag `` (0 bytes) tagging the 8-bit integer 0: `<0:|i3:0,`
+
+### records (products/records), also maps
+
+A record (`{`) is a concatenation of tags (`<`). It needs to be closed with `}`.
+If tag names repeat, the later ones should be ignored. Ordering does not matter.
+
+Similar to text, records start with the length of their *whole encoded content*, in bytes. This makes it possible to treat their contents as opaque bytestrings.
+
+* There is no empty record. (TODO: make the empty record the unit type, remove `u,`?)
+* A record with one empty field, `foo`: `{9:<3:foo|u,}`
+* A record with two fields, `foo` and `x`: `{21:<3:foo|u,<1:x|t3:baz,}`
+* The same record: `{21:<1:x|t3:baz,<3:foo|u,}`
+* The same record (later occurrences of fields are ignored): `{28:<1:x|t3:baz,<3:foo|u,<1:x|u,}`
+
+### sums (tagged unions)
+
+Simply a tagged value. The tag marker `<` indicates it is a sum if it appears outside of a record.
+
+## lists
+
+A list (`[`) imposes an ordering on a sequence of values. It needs to be closed with `]`. Values in it are simply concatenated.
+
+Similar to records, lists start with the length of their whole encoded content.
+
+* The empty list: `[0:]`
+* The list with one element, the string `foo`: `[7:t3:foo,]`
+* The list with text `foo` followed by i3 `-42`: `[14:t3:foo,i3:-42,]`
+* The list with `Some` and `None` tags: `[35:<4:Some|t3:foo,<4:None|u,<4:None|u,]`
+
+## motivation
+
+TODO
+
+## guarantees
+
+TODO: do I want unique representation (bijection like bencode?) This would put more restrictions on the generator, like sorting records in lexicographic order, but would make it possible to compare without decoding
+
+
+[bencode]: https://en.wikipedia.org/wiki/Bencode
+[netstring]: https://en.wikipedia.org/wiki/Netstring
diff --git a/users/Profpatsch/netencode/default.nix b/users/Profpatsch/netencode/default.nix
new file mode 100644
index 000000000000..294e3b4395da
--- /dev/null
+++ b/users/Profpatsch/netencode/default.nix
@@ -0,0 +1,136 @@
+{ depot, pkgs, lib, ... 
}: + +let + imports = { + inherit (depot.users.Profpatsch) + writers; + }; + + netencode-rs = imports.writers.testRustSimple + (imports.writers.rustSimpleLib { + name = "netencode"; + dependencies = [ + depot.users.Profpatsch.rust-crates.nom + depot.users.Profpatsch.execline.exec-helpers + ]; + release = false; + verbose = true; + } (builtins.readFile ./netencode.rs)); + + gen = import ./gen.nix { inherit lib; }; + + cfg-if = pkgs.buildRustCrate { + pname = "cfg-if"; + version = "1.0.0"; + crateName = "cfg-if"; + sha256 = "1fzidq152hnxhg4lj6r2gv4jpnn8yivp27z6q6xy7w6v0dp6bai9"; + }; + + log = pkgs.buildRustCrate { + pname = "log"; + version = "0.4.11"; + crateName = "log"; + sha256 = "0m6xhqxsps5mgd7r91g5mqkndbh8zbjd58p7w75r330zl4n40l07"; + dependencies = [ cfg-if ]; + }; + + serde_derive = pkgs.buildRustCrate { + pname = "serde"; + crateName = "serde"; + version = "1.0.123"; + sha256 = "05xl2s1vpf3p7fi2yc9qlzw88d5ap0z3qmhmd7axa6pp9pn1s5xc"; + }; + + serde = pkgs.buildRustCrate { + pname = "serde"; + crateName = "serde"; + version = "1.0.123"; + sha256 = "05xl2s1vpf3p7fi2yc9qlzw88d5ap0z3qmhmd7axa6pp9pn1s5xc"; + features = [ "std" ]; + }; + + mustache = pkgs.buildRustCrate { + pname = "mustache"; + version = "0.9.0"; + crateName = "mustache"; + sha256 = "1zgl8l15i19lzp90icgwyi6zqdd31b9vm8w129f41d1zd0hs7ayq"; + dependencies = [ log serde ]; + }; + + netencode-mustache = imports.writers.rustSimple { + name = "netencode_mustache"; + dependencies = [ + depot.users.Profpatsch.arglib.netencode.rust + netencode-rs + mustache + ]; + } (builtins.readFile ./netencode-mustache.rs); + + + record-get = imports.writers.rustSimple { + name = "record-get"; + dependencies = [ + netencode-rs + depot.users.Profpatsch.execline.exec-helpers + depot.users.Profpatsch.arglib.netencode.rust + ]; + } '' + extern crate netencode; + extern crate arglib_netencode; + extern crate exec_helpers; + use netencode::{encode, dec}; + use netencode::dec::{Decoder, DecodeError}; + + fn main() { + let mut buf = vec![]; + let args = exec_helpers::args("record-get", 1); + let field = match std::str::from_utf8(&args[0]) { + Ok(f) => f, + Err(_e) => exec_helpers::die_user_error("record-get", format!("The field name needs to be valid unicode")) + }; + let u = netencode::u_from_stdin_or_die_user_error("record-get", &mut buf); + match (dec::RecordDot {field, inner: dec::AnyU }).dec(u) { + Ok(u) => encode(&mut std::io::stdout(), &u).expect("encoding to stdout failed"), + Err(DecodeError(err)) => exec_helpers::die_user_error("record-get", err) + } + } + ''; + + record-splice-env = imports.writers.rustSimple { + name = "record-splice-env"; + dependencies = [ + netencode-rs + depot.users.Profpatsch.execline.exec-helpers + ]; + } '' + extern crate netencode; + extern crate exec_helpers; + use netencode::dec::{Record, Try, ScalarAsBytes, Decoder, DecodeError}; + + fn main() { + let mut buf = vec![]; + let u = netencode::u_from_stdin_or_die_user_error("record-splice-env", &mut buf); + let (_, prog) = exec_helpers::args_for_exec("record-splice-env", 0); + match Record(Try(ScalarAsBytes)).dec(u) { + Ok(map) => { + exec_helpers::exec_into_args( + "record-splice-env", + prog, + // some elements can’t be decoded as scalars, so just ignore them + map.into_iter().filter_map(|(k, v)| v.map(|v2| (k, v2))) + ); + }, + Err(DecodeError(err)) => exec_helpers::die_user_error("record-splice-env", err), + } + } + ''; + +in { + inherit + netencode-rs + netencode-mustache + record-get + record-splice-env + gen + ; +} diff --git 
a/users/Profpatsch/netencode/gen.nix b/users/Profpatsch/netencode/gen.nix new file mode 100644 index 000000000000..305ff7b08dd6 --- /dev/null +++ b/users/Profpatsch/netencode/gen.nix @@ -0,0 +1,69 @@ +{ lib }: +let + + netstring = tag: suffix: s: + "${tag}${toString (builtins.stringLength s)}:${s}${suffix}"; + + unit = "u,"; + + n1 = b: if b then "n1:1," else "n1:0,"; + + n = i: n: "n${toString i}:${toString n},"; + i = i: n: "i${toString i}:${toString n},"; + + n3 = n 3; + n6 = n 6; + n7 = n 7; + + i3 = i 3; + i6 = i 6; + i7 = i 7; + + text = netstring "t" ","; + binary = netstring "b" ","; + + tag = key: val: netstring "<" "|" key + val; + + concatStrings = builtins.concatStringsSep ""; + + record = lokv: netstring "{" "}" + (concatStrings (map ({key, val}: tag key val) lokv)); + + list = l: netstring "[" "]" (concatStrings l); + + dwim = val: + let match = { + "bool" = n1; + "int" = i6; + "string" = text; + "set" = attrs: + # it could be a derivation, then just return the path + if attrs.type or "" == "derivation" then text "${attrs}" + else + record (lib.mapAttrsToList + (k: v: { + key = k; + val = dwim v; + }) attrs); + "list" = l: list (map dwim l); + }; + in match.${builtins.typeOf val} val; + +in { + inherit + unit + n1 + n3 + n6 + n7 + i3 + i6 + i7 + text + binary + tag + record + list + dwim + ; +} diff --git a/users/Profpatsch/netencode/netencode-mustache.rs b/users/Profpatsch/netencode/netencode-mustache.rs new file mode 100644 index 000000000000..ee7bafed2250 --- /dev/null +++ b/users/Profpatsch/netencode/netencode-mustache.rs @@ -0,0 +1,53 @@ +extern crate netencode; +extern crate mustache; +extern crate arglib_netencode; + +use mustache::{Data}; +use netencode::{T}; +use std::collections::HashMap; +use std::os::unix::ffi::{OsStrExt}; +use std::io::{Read}; + +fn netencode_to_mustache_data_dwim(t: T) -> Data { + match t { + // TODO: good idea? 
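+        // the arms below render every number as a string, since mustache’s
+        // `Data` has no numeric variant; Binary and Sum are left unimplemented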
+ T::Unit => Data::Null, + T::N1(b) => Data::Bool(b), + T::N3(u) => Data::String(u.to_string()), + T::N6(u) => Data::String(u.to_string()), + T::N7(u) => Data::String(u.to_string()), + T::I3(i) => Data::String(i.to_string()), + T::I6(i) => Data::String(i.to_string()), + T::I7(i) => Data::String(i.to_string()), + T::Text(s) => Data::String(s), + T::Binary(b) => unimplemented!(), + T::Sum(tag) => unimplemented!(), + T::Record(xs) => Data::Map( + xs.into_iter() + .map(|(key, val)| (key, netencode_to_mustache_data_dwim(val))) + .collect::<HashMap<_,_>>() + ), + T::List(xs) => Data::Vec( + xs.into_iter() + .map(|x| netencode_to_mustache_data_dwim(x)) + .collect::<Vec<_>>() + ), + } +} + +pub fn from_stdin() -> () { + let data = netencode_to_mustache_data_dwim( + arglib_netencode::arglib_netencode("netencode-mustache", Some(std::ffi::OsStr::new("TEMPLATE_DATA"))) + ); + let mut stdin = String::new(); + std::io::stdin().read_to_string(&mut stdin).unwrap(); + mustache::compile_str(&stdin) + .and_then(|templ| templ.render_data( + &mut std::io::stdout(), + &data + )).unwrap() +} + +pub fn main() { + from_stdin() +} diff --git a/users/Profpatsch/netencode/netencode.rs b/users/Profpatsch/netencode/netencode.rs new file mode 100644 index 000000000000..28003260925c --- /dev/null +++ b/users/Profpatsch/netencode/netencode.rs @@ -0,0 +1,776 @@ +extern crate nom; +extern crate exec_helpers; + +use std::collections::HashMap; +use std::io::{Write, Read}; +use std::fmt::{Display, Debug}; + +#[derive(Debug, PartialEq, Eq, Clone)] +pub enum T { + // Unit + Unit, + // Boolean + N1(bool), + // Naturals + N3(u8), + N6(u64), + N7(u128), + // Integers + I3(i8), + I6(i64), + I7(i128), + // Text + // TODO: make into &str + Text(String), + // TODO: rename to Bytes + Binary(Vec<u8>), + // Tags + // TODO: make into &str + Sum(Tag<String, T>), + // TODO: make into &str + Record(HashMap<String, T>), + List(Vec<T>), +} + +impl T { + pub fn to_u<'a>(&'a self) -> U<'a> { + match self { + T::Unit => U::Unit, + T::N1(b) => U::N1(*b), + T::N3(u) => U::N3(*u), + T::N6(u) => U::N6(*u), + T::N7(u) => U::N7(*u), + T::I3(i) => U::I3(*i), + T::I6(i) => U::I6(*i), + T::I7(i) => U::I7(*i), + T::Text(t) => U::Text(t.as_str()), + T::Binary(v) => U::Binary(v), + T::Sum(Tag { tag, val }) => U::Sum( + Tag { tag: tag.as_str(), val: Box::new(val.to_u()) } + ), + T::Record(map) => U::Record( + map.iter().map(|(k, v)| (k.as_str(), v.to_u())).collect() + ), + T::List(l) => U::List( + l.iter().map(|v| v.to_u()).collect::<Vec<U<'a>>>() + ), + } + } + + pub fn encode<'a>(&'a self) -> Vec<u8> { + match self { + // TODO: don’t go via U, inefficient + o => o.to_u().encode() + } + } +} + +#[derive(Debug, PartialEq, Eq, Clone)] +pub enum U<'a> { + Unit, + // Boolean + N1(bool), + // Naturals + N3(u8), + N6(u64), + N7(u128), + // Integers + I3(i8), + I6(i64), + I7(i128), + // Text + Text(&'a str), + Binary(&'a [u8]), + // Tags + // TODO: the U-recursion we do here means we can’t be breadth-lazy anymore + // like we originally planned; maybe we want to go `U<'a>` → `&'a [u8]` again? 
+ Sum(Tag<&'a str, U<'a>>), + Record(HashMap<&'a str, U<'a>>), + List(Vec<U<'a>>), +} + +impl<'a> U<'a> { + pub fn encode(&self) -> Vec<u8> { + let mut c = std::io::Cursor::new(vec![]); + encode(&mut c, self); + c.into_inner() + } + + pub fn to_t(&self) -> T { + match self { + U::Unit => T::Unit, + U::N1(b) => T::N1(*b), + U::N3(u) => T::N3(*u), + U::N6(u) => T::N6(*u), + U::N7(u) => T::N7(*u), + U::I3(i) => T::I3(*i), + U::I6(i) => T::I6(*i), + U::I7(i) => T::I7(*i), + U::Text(t) => T::Text((*t).to_owned()), + U::Binary(v) => T::Binary((*v).to_owned()), + U::Sum(Tag { tag, val }) => T::Sum( + Tag { tag: (*tag).to_owned(), val: Box::new(val.to_t()) } + ), + U::Record(map) => T::Record( + map.iter().map(|(k, v)| ((*k).to_owned(), v.to_t())).collect::<HashMap<String, T>>() + ), + U::List(l) => T::List( + l.iter().map(|v| v.to_t()).collect::<Vec<T>>() + ), + } + } +} + +#[derive(Debug, PartialEq, Eq, Clone)] +pub struct Tag<S, A> { + // TODO: make into &str + pub tag: S, + pub val: Box<A> +} + +impl<S, A> Tag<S, A> { + fn map<F, B>(self, f: F) -> Tag<S, B> + where F: Fn(A) -> B { + Tag { + tag: self.tag, + val: Box::new(f(*self.val)) + } + } +} + +fn encode_tag<W: Write>(w: &mut W, tag: &str, val: &U) -> std::io::Result<()> { + write!(w, "<{}:{}|", tag.len(), tag)?; + encode(w, val)?; + Ok(()) +} + +pub fn encode<W: Write>(w: &mut W, u: &U) -> std::io::Result<()> { + match u { + U::Unit => write!(w, "u,"), + U::N1(b) => if *b { write!(w, "n1:1,") } else { write!(w, "n1:0,") }, + U::N3(n) => write!(w, "n3:{},", n), + U::N6(n) => write!(w, "n6:{},", n), + U::N7(n) => write!(w, "n7:{},", n), + U::I3(i) => write!(w, "i3:{},", i), + U::I6(i) => write!(w, "i6:{},", i), + U::I7(i) => write!(w, "i7:{},", i), + U::Text(s) => { + write!(w, "t{}:", s.len()); + w.write(s.as_bytes()); + write!(w, ",") + } + U::Binary(s) => { + write!(w, "b{}:", s.len()); + w.write(&s); + write!(w, ",") + }, + U::Sum(Tag{tag, val}) => encode_tag(w, tag, val), + U::Record(m) => { + let mut c = std::io::Cursor::new(vec![]); + for (k, v) in m { + encode_tag(&mut c, k, v)?; + } + write!(w, "{{{}:", c.get_ref().len())?; + w.write(c.get_ref())?; + write!(w, "}}") + }, + U::List(l) => { + let mut c = std::io::Cursor::new(vec![]); + for u in l { + encode(&mut c, u)?; + } + write!(w, "[{}:", c.get_ref().len())?; + w.write(c.get_ref())?; + write!(w, "]") + } + } +} + +pub fn text(s: String) -> T { + T::Text(s) +} + +pub fn u_from_stdin_or_die_user_error<'a>(prog_name: &'_ str, stdin_buf: &'a mut Vec<u8>) -> U<'a> { + std::io::stdin().lock().read_to_end(stdin_buf); + let u = match parse::u_u(stdin_buf) { + Ok((rest, u)) => match rest { + b"" => u, + _ => exec_helpers::die_user_error(prog_name, format!("stdin contained some soup after netencode value: {:?}", rest)) + }, + Err(err) => exec_helpers::die_user_error(prog_name, format!("unable to parse netencode from stdin: {:?}", err)) + }; + u +} + +pub mod parse { + use super::{T, Tag, U}; + + use std::str::FromStr; + use std::ops::Neg; + use std::collections::HashMap; + + use nom::{IResult}; + use nom::bytes::complete::{tag, take}; + use nom::branch::{alt}; + use nom::character::complete::{digit1, char}; + use nom::sequence::{tuple}; + use nom::combinator::{map, map_res, flat_map, map_parser, opt}; + use nom::error::{context, ErrorKind, ParseError}; + + fn unit_t(s: &[u8]) -> IResult<&[u8], ()> { + let (s, _) = context("unit", tag("u,"))(s)?; + Ok((s, ())) + } + + fn usize_t(s: &[u8]) -> IResult<&[u8], usize> { + context( + "usize", + map_res( + map_res(digit1, |n| 
std::str::from_utf8(n)), + |s| s.parse::<usize>()) + )(s) + } + + fn sized(begin: char, end: char) -> impl Fn(&[u8]) -> IResult<&[u8], &[u8]> { + move |s: &[u8]| { + let (s, (_, len, _)) = tuple(( + char(begin), + usize_t, + char(':') + ))(s)?; + let (s, (res, _)) = tuple(( + take(len), + char(end) + ))(s)?; + Ok((s, res)) + } + } + + + fn uint_t<'a, I: FromStr + 'a>(t: &'static str) -> impl Fn(&'a [u8]) -> IResult<&'a [u8], I> { + move |s: &'a [u8]| { + let (s, (_, _, int, _)) = tuple(( + tag(t.as_bytes()), + char(':'), + map_res( + map_res(digit1, |n: &[u8]| std::str::from_utf8(n)), + |s| s.parse::<I>() + ), + char(',') + ))(s)?; + Ok((s, int)) + } + } + + fn bool_t<'a>() -> impl Fn(&'a [u8]) -> IResult<&'a [u8], bool> { + context("bool", alt(( + map(tag("n1:0,"), |_| false), + map(tag("n1:1,"), |_| true), + ))) + } + + fn int_t<'a, I: FromStr + Neg<Output=I>>(t: &'static str) -> impl Fn(&'a [u8]) -> IResult<&[u8], I> { + context( + t, + move |s: &'a [u8]| { + let (s, (_, _, neg, int, _)) = tuple(( + tag(t.as_bytes()), + char(':'), + opt(char('-')), + map_res( + map_res(digit1, |n: &[u8]| std::str::from_utf8(n)), + |s| s.parse::<I>() + ), + char(',') + ))(s)?; + let res = match neg { + Some(_) => -int, + None => int, + }; + Ok((s, res)) + } + ) + } + + fn tag_t(s: &[u8]) -> IResult<&[u8], Tag<String, T>> { + // recurses into the main parser + map(tag_g(t_t), + |Tag {tag, val}| + Tag { + tag: tag.to_string(), + val + })(s) + } + + fn tag_g<'a, P, O>(inner: P) -> impl Fn(&'a [u8]) -> IResult<&'a [u8], Tag<&'a str, O>> + where + P: Fn(&'a [u8]) -> IResult<&'a [u8], O> + { + move |s: &[u8]| { + let (s, tag) = sized('<', '|')(s)?; + let (s, val) = inner(s)?; + Ok((s, Tag { + tag: std::str::from_utf8(tag) + .map_err(|_| nom::Err::Failure((s, ErrorKind::Char)))?, + val: Box::new(val) + })) + + } + } + + /// parse text scalar (`t5:hello,`) + fn text(s: &[u8]) -> IResult<&[u8], T> { + let (s, res) = text_g(s)?; + Ok((s, T::Text(res.to_string()))) + } + + fn text_g(s: &[u8]) -> IResult<&[u8], &str> { + let (s, res) = sized('t', ',')(s)?; + Ok((s, + std::str::from_utf8(res) + .map_err(|_| nom::Err::Failure((s, ErrorKind::Char)))?, + )) + } + + fn binary<'a>() -> impl Fn(&'a [u8]) -> IResult<&'a [u8], T> { + map(binary_g(), |b| T::Binary(b.to_owned())) + } + + fn binary_g() -> impl Fn(&[u8]) -> IResult<&[u8], &[u8]> { + sized('b', ',') + } + + fn list_t(s: &[u8]) -> IResult<&[u8], Vec<T>> { + list_g(t_t)(s) + } + + fn list_g<'a, P, O>(inner: P) -> impl Fn(&'a [u8]) -> IResult<&'a [u8], Vec<O>> + where + O: Clone, + P: Fn(&'a [u8]) -> IResult<&'a [u8], O> + { + map_parser( + sized('[', ']'), + nom::multi::many0(inner) + ) + } + + fn record_t<'a>(s: &'a [u8]) -> IResult<&'a [u8], HashMap<String, T>> { + let (s, r) = record_g(t_t)(s)?; + Ok((s, + r.into_iter() + .map(|(k, v)| (k.to_string(), v)) + .collect::<HashMap<_,_>>())) + } + + fn record_g<'a, P, O>(inner: P) -> impl Fn(&'a [u8]) -> IResult<&'a [u8], HashMap<&'a str, O>> + where + O: Clone, + P: Fn(&'a [u8]) -> IResult<&'a [u8], O> + { + map_parser( + sized('{', '}'), + nom::multi::fold_many1( + tag_g(inner), + HashMap::new(), + |mut acc: HashMap<_,_>, Tag { tag, mut val }| { + // ignore duplicated tag names that appear later + // according to netencode spec + if ! 
acc.contains_key(tag) { + acc.insert(tag, *val); + } + acc + } + ) + ) + } + + pub fn u_u(s: &[u8]) -> IResult<&[u8], U> { + alt(( + map(text_g, U::Text), + map(binary_g(), U::Binary), + map(unit_t, |()| U::Unit), + map(tag_g(u_u), |t| U::Sum(t)), + map(list_g(u_u), U::List), + map(record_g(u_u), U::Record), + + map(bool_t(), |u| U::N1(u)), + map(uint_t("n3"), |u| U::N3(u)), + map(uint_t("n6"), |u| U::N6(u)), + map(uint_t("n7"), |u| U::N7(u)), + map(int_t("i3"), |u| U::I3(u)), + map(int_t("i6"), |u| U::I6(u)), + map(int_t("i7"), |u| U::I7(u)), + + // less common + map(uint_t("n2"), |u| U::N3(u)), + map(uint_t("n4"), |u| U::N6(u)), + map(uint_t("n5"), |u| U::N6(u)), + map(int_t("i1"), |u| U::I3(u)), + map(int_t("i2"), |u| U::I3(u)), + map(int_t("i4"), |u| U::I6(u)), + map(int_t("i5"), |u| U::I6(u)), + // TODO: 8, 9 not supported + ))(s) + } + + pub fn t_t(s: &[u8]) -> IResult<&[u8], T> { + alt(( + text, + binary(), + map(unit_t, |_| T::Unit), + map(tag_t, |t| T::Sum(t)), + map(list_t, |l| T::List(l)), + map(record_t, |p| T::Record(p)), + + map(bool_t(), |u| T::N1(u)), + // 8, 64 and 128 bit + map(uint_t("n3"), |u| T::N3(u)), + map(uint_t("n6"), |u| T::N6(u)), + map(uint_t("n7"), |u| T::N7(u)), + map(int_t("i3"), |u| T::I3(u)), + map(int_t("i6"), |u| T::I6(u)), + map(int_t("i7"), |u| T::I7(u)), + + // less common + map(uint_t("n2"), |u| T::N3(u)), + map(uint_t("n4"), |u| T::N6(u)), + map(uint_t("n5"), |u| T::N6(u)), + map(int_t("i1"), |u| T::I3(u)), + map(int_t("i2"), |u| T::I3(u)), + map(int_t("i4"), |u| T::I6(u)), + map(int_t("i5"), |u| T::I6(u)), + // TODO: 8, 9 not supported + ))(s) + } + + #[cfg(test)] + mod tests { + use super::*; + + #[test] + fn test_parse_unit_t() { + assert_eq!( + unit_t("u,".as_bytes()), + Ok(("".as_bytes(), ())) + ); + } + + #[test] + fn test_parse_bool_t() { + assert_eq!( + bool_t()("n1:0,".as_bytes()), + Ok(("".as_bytes(), false)) + ); + assert_eq!( + bool_t()("n1:1,".as_bytes()), + Ok(("".as_bytes(), true)) + ); + } + + #[test] + fn test_parse_usize_t() { + assert_eq!( + usize_t("32foo".as_bytes()), + Ok(("foo".as_bytes(), 32)) + ); + } + + #[test] + fn test_parse_int_t() { + assert_eq!( + uint_t::<u8>("n3")("n3:42,abc".as_bytes()), + Ok(("abc".as_bytes(), 42)) + ); + assert_eq!( + uint_t::<u8>("n3")("n3:1024,abc".as_bytes()), + Err(nom::Err::Error(("1024,abc".as_bytes(), nom::error::ErrorKind::MapRes))) + ); + assert_eq!( + int_t::<i64>("i6")("i6:-23,abc".as_bytes()), + Ok(("abc".as_bytes(), -23)) + ); + assert_eq!( + int_t::<i128>("i3")("i3:0,:abc".as_bytes()), + Ok((":abc".as_bytes(), 0)) + ); + assert_eq!( + uint_t::<u8>("n7")("n7:09,".as_bytes()), + Ok(("".as_bytes(), 9)) + ); + // assert_eq!( + // length("c"), + // Err(nom::Err::Error(("c", nom::error::ErrorKind::Digit))) + // ); + // assert_eq!( + // length(":"), + // Err(nom::Err::Error((":", nom::error::ErrorKind::Digit))) + // ); + } + + #[test] + fn test_parse_text() { + assert_eq!( + text("t5:hello,".as_bytes()), + Ok(("".as_bytes(), T::Text("hello".to_owned()))) + ); + assert_eq!( + text("t4:fo,".as_bytes()), + // TODO: way better parse error messages + Err(nom::Err::Error(("fo,".as_bytes(), nom::error::ErrorKind::Eof))) + ); + assert_eq!( + text("t9:今日は,".as_bytes()), + Ok(("".as_bytes(), T::Text("今日は".to_owned()))) + ); + } + + #[test] + fn test_parse_binary() { + assert_eq!( + binary()("b5:hello,".as_bytes()), + Ok(("".as_bytes(), T::Binary(Vec::from("hello".to_owned())))) + ); + assert_eq!( + binary()("b4:fo,".as_bytes()), + // TODO: way better parse error messages + 
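+                // (the Eof here comes from `take(4)` running out of input:
+                // the declared length is 4 but only `fo,` — 3 bytes — remains)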
Err(nom::Err::Error(("fo,".as_bytes(), nom::error::ErrorKind::Eof))) + ); + assert_eq!( + binary()("b9:今日は,".as_bytes()), + Ok(("".as_bytes(), T::Binary(Vec::from("今日は".as_bytes())))) + ); + } + + #[test] + fn test_list() { + assert_eq!( + list_t("[0:]".as_bytes()), + Ok(("".as_bytes(), vec![])) + ); + assert_eq!( + list_t("[6:u,u,u,]".as_bytes()), + Ok(("".as_bytes(), vec![ + T::Unit, + T::Unit, + T::Unit, + ])) + ); + assert_eq!( + list_t("[15:u,[7:t3:foo,]u,]".as_bytes()), + Ok(("".as_bytes(), vec![ + T::Unit, + T::List(vec![T::Text("foo".to_owned())]), + T::Unit, + ])) + ); + } + + #[test] + fn test_record() { + assert_eq!( + record_t("{21:<1:a|u,<1:b|u,<1:c|u,}".as_bytes()), + Ok(("".as_bytes(), vec![ + ("a".to_owned(), T::Unit), + ("b".to_owned(), T::Unit), + ("c".to_owned(), T::Unit), + ].into_iter().collect::<HashMap<String, T>>())) + ); + // duplicated keys are ignored (first is taken) + assert_eq!( + record_t("{25:<1:a|u,<1:b|u,<1:a|i1:-1,}".as_bytes()), + Ok(("".as_bytes(), vec![ + ("a".to_owned(), T::Unit), + ("b".to_owned(), T::Unit), + ].into_iter().collect::<HashMap<_,_>>())) + ); + } + + #[test] + fn test_parse() { + assert_eq!( + t_t("n3:255,".as_bytes()), + Ok(("".as_bytes(), T::N3(255))) + ); + assert_eq!( + t_t("t6:halloo,".as_bytes()), + Ok(("".as_bytes(), T::Text("halloo".to_owned()))) + ); + assert_eq!( + t_t("<3:foo|t6:halloo,".as_bytes()), + Ok(("".as_bytes(), T::Sum (Tag { + tag: "foo".to_owned(), + val: Box::new(T::Text("halloo".to_owned())) + }))) + ); + // { a: Unit + // , foo: List <A: Unit | B: List i3> } + assert_eq!( + t_t("{52:<1:a|u,<3:foo|[33:<1:A|u,<1:A|n1:1,<1:B|[7:i3:127,]]}".as_bytes()), + Ok(("".as_bytes(), T::Record(vec![ + ("a".to_owned(), T::Unit), + ("foo".to_owned(), T::List(vec![ + T::Sum(Tag { tag: "A".to_owned(), val: Box::new(T::Unit) }), + T::Sum(Tag { tag: "A".to_owned(), val: Box::new(T::N1(true)) }), + T::Sum(Tag { tag: "B".to_owned(), val: Box::new(T::List(vec![T::I3(127)])) }), + ])) + ].into_iter().collect::<HashMap<String, T>>()))) + ); + } + + } +} + +pub mod dec { + use super::*; + use std::collections::HashMap; + + pub struct DecodeError(pub String); + + pub trait Decoder<'a> { + type A; + fn dec(&self, u: U<'a>) -> Result<Self::A, DecodeError>; + } + + #[derive(Clone, Copy)] + pub struct AnyT; + #[derive(Clone, Copy)] + pub struct AnyU; + + impl<'a> Decoder<'a> for AnyT { + type A = T; + fn dec(&self, u: U<'a>) -> Result<Self::A, DecodeError> { + Ok(u.to_t()) + } + } + + impl<'a> Decoder<'a> for AnyU { + type A = U<'a>; + fn dec(&self, u: U<'a>) -> Result<Self::A, DecodeError> { + Ok(u) + } + } + + #[derive(Clone, Copy)] + pub struct Text; + // TODO: rename to Bytes + #[derive(Clone, Copy)] + pub struct Binary; + + impl<'a> Decoder<'a> for Text { + type A = &'a str; + fn dec(&self, u: U<'a>) -> Result<Self::A, DecodeError> { + match u { + U::Text(t) => Ok(t), + other => Err(DecodeError(format!("Cannot decode {:?} into Text", other))), + } + } + } + + impl<'a> Decoder<'a> for Binary { + type A = &'a [u8]; + fn dec(&self, u: U<'a>) -> Result<Self::A, DecodeError> { + match u { + U::Binary(b) => Ok(b), + other => Err(DecodeError(format!("Cannot decode {:?} into Binary", other))), + } + } + } + + #[derive(Clone, Copy)] + pub struct ScalarAsBytes; + + impl<'a> Decoder<'a> for ScalarAsBytes { + type A = Vec<u8>; + fn dec(&self, u: U<'a>) -> Result<Self::A, DecodeError> { + match u { + U::N3(u) => Ok(format!("{}", u).into_bytes()), + U::N6(u) => Ok(format!("{}", u).into_bytes()), + U::N7(u) => Ok(format!("{}", u).into_bytes()), + 
U::I3(i) => Ok(format!("{}", i).into_bytes()), + U::I6(i) => Ok(format!("{}", i).into_bytes()), + U::I7(i) => Ok(format!("{}", i).into_bytes()), + U::Text(t) => Ok(t.as_bytes().to_owned()), + U::Binary(b) => Ok(b.to_owned()), + o => Err(DecodeError(format!("Cannot decode {:?} into scalar", o))), + } + } + } + + #[derive(Clone, Copy)] + pub struct Record<T>(pub T); + + impl<'a, Inner> Decoder<'a> for Record<Inner> + where Inner: Decoder<'a> + { + type A = HashMap<&'a str, Inner::A>; + fn dec(&self, u: U<'a>) -> Result<Self::A, DecodeError> { + match u { + U::Record(map) => + map.into_iter() + .map(|(k, v)| self.0.dec(v).map(|v2| (k, v2))) + .collect::<Result<Self::A, _>>(), + o => Err(DecodeError(format!("Cannot decode {:?} into record", o))) + } + } + } + + #[derive(Clone, Copy)] + pub struct RecordDot<'a, T> { + pub field: &'a str, + pub inner: T + } + + impl <'a, Inner> Decoder<'a> for RecordDot<'_, Inner> + where Inner: Decoder<'a> + Clone + { + type A = Inner::A; + fn dec(&self, u: U<'a>) -> Result<Self::A, DecodeError> { + match Record(self.inner.clone()).dec(u) { + Ok(mut map) => match map.remove(self.field) { + Some(inner) => Ok(inner), + None => Err(DecodeError(format!("Cannot find `{}` in record map", self.field))), + }, + Err(err) => Err(err), + } + } + } + + #[derive(Clone)] + pub struct OneOf<T, A>{ + pub inner: T, + pub list: Vec<A>, + } + + impl <'a, Inner> Decoder<'a> for OneOf<Inner, Inner::A> + where Inner: Decoder<'a>, + Inner::A: Display + Debug + PartialEq + { + type A = Inner::A; + fn dec(&self, u: U<'a>) -> Result<Self::A, DecodeError> { + match self.inner.dec(u) { + Ok(inner) => match self.list.iter().any(|x| x.eq(&inner)) { + true => Ok(inner), + false => Err(DecodeError(format!("{} is not one of {:?}", inner, self.list))) + }, + Err(err) => Err(err) + } + } + } + + #[derive(Clone)] + pub struct Try<T>(pub T); + + impl <'a, Inner> Decoder<'a> for Try<Inner> + where Inner: Decoder<'a> + { + type A = Option<Inner::A>; + fn dec(&self, u: U<'a>) -> Result<Self::A, DecodeError> { + match self.0.dec(u) { + Ok(inner) => Ok(Some(inner)), + Err(err) => Ok(None) + } + } + } + +} diff --git a/users/Profpatsch/netstring/README.md b/users/Profpatsch/netstring/README.md new file mode 100644 index 000000000000..b8daea11d158 --- /dev/null +++ b/users/Profpatsch/netstring/README.md @@ -0,0 +1,18 @@ +# Netstring + +Netstrings are a djb invention. They are intended as a serialization format. Instead of inline control characters like `\n` or `\0` to signal the end of a string, they use a run-length encoding given as the number of bytes, encoded in ASCII, at the beginning of the string. + +``` +hello -> 5:hello, +foo! -> 4:foo!, +こんにちは -> 15:こんにちは, +``` + +They can be used to encode e.g. lists by simply concatenating and reading them in one-by-one. + +If you need a more complex encoding, you could start encoding e.g. tuples as netstrings-in-netstrings, or you could use [`netencode`](../netencode/README.md) instead, which is what-if-json-but-netstrings, and takes the idea of netstrings to their logical conclusion. + +Resources: + +Spec: http://cr.yp.to/proto/netstrings.txt +Wiki: https://en.wikipedia.org/wiki/Netstring diff --git a/users/Profpatsch/netstring/default.nix b/users/Profpatsch/netstring/default.nix new file mode 100644 index 000000000000..3cf882d5a279 --- /dev/null +++ b/users/Profpatsch/netstring/default.nix @@ -0,0 +1,76 @@ +{ lib, pkgs, depot, ... 
}: +let + toNetstring = s: + "${toString (builtins.stringLength s)}:${s},"; + + toNetstringKeyVal = attrs: + lib.concatStrings + (lib.mapAttrsToList + (k: v: toNetstring (toNetstring k + toNetstring v)) + attrs); + + python-netstring = depot.users.Profpatsch.writers.python3Lib { + name = "netstring"; + } '' + def read_netstring(bytes): + (int_length, rest) = bytes.split(sep=b':', maxsplit=1) + val = rest[:int(int_length)] + # has to end on a , + assert(rest[len(val)] == ord(',')) + return (val, rest[len(val) + 1:]) + + def read_netstring_key_val(bytes): + (keyvalnet, rest) = read_netstring(bytes) + (key, valnet) = read_netstring(keyvalnet) + (val, nothing) = read_netstring(valnet) + assert(nothing == b"") + return (key, val, rest) + + def read_netstring_key_val_list(bytes): + rest = bytes + res = {} + while rest != b"": + (key, val, r) = read_netstring_key_val(rest) + rest = r + res[key] = val + return res + ''; + + rust-netstring = depot.users.Profpatsch.writers.rustSimpleLib { + name = "netstring"; + } '' + pub fn to_netstring(s: &[u8]) -> Vec<u8> { + let len = s.len(); + // length of the integer as ascii + let i_len = ((len as f64).log10() as usize) + 1; + let ns_len = i_len + 1 + len + 1; + let mut res = Vec::with_capacity(ns_len); + res.extend_from_slice(format!("{}:", len).as_bytes()); + res.extend_from_slice(s); + res.push(b','); + res + } + ''; + + tests = import ./tests.nix { + inherit + depot + pkgs + lib + python-netstring + rust-netstring + toNetstring + toNetstringKeyVal + ; + }; + +in { + inherit + toNetstring + toNetstringKeyVal + python-netstring + rust-netstring + tests + ; + +} diff --git a/users/Profpatsch/netstring/tests.nix b/users/Profpatsch/netstring/tests.nix new file mode 100644 index 000000000000..23141472d6a8 --- /dev/null +++ b/users/Profpatsch/netstring/tests.nix @@ -0,0 +1,61 @@ +{ depot, lib, pkgs, python-netstring, rust-netstring, toNetstring, toNetstringKeyVal }: + +let + + python-netstring-test = depot.users.Profpatsch.writers.python3 { + name = "python-netstring-test"; + libraries = p: [ + python-netstring + ]; + } '' + import netstring + + def assEq(left, right): + assert left == right, "{} /= {}".format(str(left), str(right)) + + assEq( + netstring.read_netstring(b"""${toNetstring "hi!"}"""), + (b"hi!", b"") + ) + + assEq( + netstring.read_netstring_key_val( + b"""${toNetstringKeyVal { foo = "42"; }}""" + ), + (b'foo', b'42', b"") + ) + + assEq( + netstring.read_netstring_key_val_list( + b"""${toNetstringKeyVal { foo = "42"; bar = "hi"; }}""" + ), + { b'foo': b'42', b'bar': b'hi' } + ) + ''; + + rust-netstring-test = depot.users.Profpatsch.writers.rustSimple { + name = "rust-netstring-test"; + dependencies = [ + rust-netstring + ]; + } '' + extern crate netstring; + + fn main() { + assert_eq!( + std::str::from_utf8(&netstring::to_netstring(b"hello")).unwrap(), + r##"${toNetstring "hello"}"## + ); + assert_eq!( + std::str::from_utf8(&netstring::to_netstring("こんにちは".as_bytes())).unwrap(), + r##"${toNetstring "こんにちは"}"## + ); + } + ''; + +in { + inherit + python-netstring-test + rust-netstring-test + ; +} diff --git a/users/Profpatsch/nixpkgs-rewriter/MetaStdenvLib.hs b/users/Profpatsch/nixpkgs-rewriter/MetaStdenvLib.hs new file mode 100644 index 000000000000..3ed96a7b6eac --- /dev/null +++ b/users/Profpatsch/nixpkgs-rewriter/MetaStdenvLib.hs @@ -0,0 +1,80 @@ +{-# LANGUAGE PartialTypeSignatures #-} +{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE NamedFieldPuns #-} +import Nix.Parser +import Nix.Expr.Types +import 
Nix.Expr.Types.Annotated +import System.Environment (getArgs) +import System.Exit (die) +import Data.Fix (Fix(..)) +import qualified Data.Text as Text +import qualified Data.ByteString.Lazy.Char8 as BL +import qualified Data.Aeson as A +import qualified Data.Aeson.Encoding as A +import Data.Function ((&)) +import qualified System.IO as IO +import qualified Text.Megaparsec.Pos as MP + +main = do + (nixFile:_) <- getArgs + (parseNixFileLoc nixFile :: IO _) >>= \case + Failure err -> do + ePutStrLn $ show err + die "oh no" + Success expr -> do + case snd $ match expr of + NoArguments -> do + ePutStrLn $ "NoArguments in " <> nixFile + printPairs mempty + YesLib vars -> do + ePutStrLn $ "lib in " <> show vars <> " in " <> nixFile + printPairs mempty + NoLib vars srcSpan -> do + ePutStrLn $ nixFile <> " needs lib added" + printPairs + $ "fileName" A..= nixFile + <> "fromLine" A..= (srcSpan & spanBegin & sourceLine) + <> "fromColumn" A..= (srcSpan & spanBegin & sourceColumn) + <> "toLine" A..= (srcSpan & spanEnd & sourceLine) + <> "toColumn" A..= (srcSpan & spanEnd & sourceColumn) + +printPairs pairs = BL.putStrLn $ A.encodingToLazyByteString $ A.pairs pairs + +ePutStrLn = IO.hPutStrLn IO.stderr + +data Descend = YesDesc | NoDesc + deriving Show +data Matched = NoArguments | NoLib [VarName] SrcSpan | YesLib [VarName] + deriving Show + +match :: Fix (Compose (Ann SrcSpan) NExprF) -> (Descend, Matched) +match = \case + (AnnE outerSpan (NAbs (ParamSet params _ _) (AnnE innerSpan _))) -> (NoDesc, + let vars = map fst params in + case (any (== "lib") vars) of + True -> YesLib vars + False -> + -- The span of the arglist is from the beginning of the match + -- to the beginning of the inner expression + let varSpan = SrcSpan + { spanBegin = outerSpan & spanBegin + -- -1 to prevent the spans from overlapping + , spanEnd = sourcePosMinus1 (innerSpan & spanBegin) } + in NoLib vars varSpan) + _ -> (NoDesc, NoArguments) + +-- | Remove one from a source position. +-- +-- That means if the current position is at the very beginning of a line, +-- jump to the previous line. +sourcePosMinus1 :: SourcePos -> SourcePos +sourcePosMinus1 src@(SourcePos { sourceLine, sourceColumn }) = + let + col = MP.mkPos $ max (MP.unPos sourceColumn - 1) 1 + line = MP.mkPos $ case MP.unPos sourceColumn of + 1 -> max (MP.unPos sourceLine - 1) 1 + _ -> MP.unPos sourceLine + in src + { sourceLine = line + , sourceColumn = col } diff --git a/users/Profpatsch/nixpkgs-rewriter/default.nix b/users/Profpatsch/nixpkgs-rewriter/default.nix new file mode 100644 index 000000000000..286530b03f94 --- /dev/null +++ b/users/Profpatsch/nixpkgs-rewriter/default.nix @@ -0,0 +1,113 @@ +{ depot, pkgs, ... 
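+# Sketch of the data flow, assuming the MetaStdenvLib behaviour above:
+# meta-stdenv-lib prints a JSON object like
+#   {"fileName":"foo/default.nix","fromLine":1,"fromColumn":1,"toLine":3,"toColumn":1}
+# for files that need `lib` added; export-json-object turns each key into an
+# environment variable and execs into the given command (used by replace-between-lines below).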
}: +let + inherit (depot.nix) + writeExecline + ; + inherit (depot.users.Profpatsch.lib) + debugExec + eprintf + ; + + bins = depot.nix.getBins pkgs.coreutils [ "head" "shuf" ] + // depot.nix.getBins pkgs.jq [ "jq" ] + // depot.nix.getBins pkgs.findutils [ "xargs" ] + // depot.nix.getBins pkgs.gnused [ "sed" ] + ; + + export-json-object = pkgs.writers.writePython3 "export-json-object" {} '' + import json + import sys + import os + + d = json.load(sys.stdin) + + if d == {}: + sys.exit(0) + + for k, v in d.items(): + os.environ[k] = str(v) + + os.execvp(sys.argv[1], sys.argv[1:]) + ''; + + meta-stdenv-lib = pkgs.writers.writeHaskell "meta-stdenv-lib" { + libraries = [ + pkgs.haskellPackages.hnix + pkgs.haskellPackages.aeson + ]; + } ./MetaStdenvLib.hs; + + replace-between-lines = writeExecline "replace-between-lines" { readNArgs = 1; } [ + "importas" "-ui" "file" "fileName" + "importas" "-ui" "from" "fromLine" + "importas" "-ui" "to" "toLine" + "if" [ eprintf "%s-%s\n" "$from" "$to" ] + (debugExec "adding lib") + bins.sed + "-e" "\${from},\${to} \${1}" + "-i" "$file" + ]; + + add-lib-if-necessary = writeExecline "add-lib-if-necessary" { readNArgs = 1; } [ + "pipeline" [ meta-stdenv-lib "$1" ] + export-json-object + # first replace any stdenv.lib mentions in the arg header + # if this is not done, the replace below kills these. + # Since we want it anyway ultimately, let’s do it here. + "if" [ replace-between-lines "s/stdenv\.lib/lib/" ] + # then add the lib argument + # (has to be before stdenv, otherwise default arguments might be in the way) + replace-between-lines "s/stdenv/lib, stdenv/" + ]; + + metaString = ''meta = with stdenv.lib; {''; + + replace-stdenv-lib = pkgs.writers.writeBash "replace-stdenv-lib" '' + set -euo pipefail + sourceDir="$1" + for file in $( + ${pkgs.ripgrep}/bin/rg \ + --files-with-matches \ + --fixed-strings \ + -e '${metaString}' \ + "$sourceDir" + ) + do + echo "replacing stdenv.lib meta in $file" >&2 + ${bins.sed} -e '/${metaString}/ s/stdenv.lib/lib/' \ + -i "$file" + ${add-lib-if-necessary} "$file" + done + ''; + + instantiate-nixpkgs-randomly = writeExecline "instantiate-nixpkgs-randomly" { readNArgs = 1; } [ + "export" "NIXPKGS_ALLOW_BROKEN" "1" + "export" "NIXPKGS_ALLOW_UNFREE" "1" + "export" "NIXPKGS_ALLOW_INSECURE" "1" + "export" "NIXPKGS_ALLOW_UNSUPPORTED_SYSTEM" "1" + "pipeline" [ + "nix" + "eval" + "--raw" + ''( + let pkgs = import ''${1} {}; + in builtins.toJSON (builtins.attrNames pkgs) + )'' + ] + "pipeline" [ bins.jq "-r" ".[]" ] + "pipeline" [ bins.shuf ] + "pipeline" [ bins.head "-n" "1000" ] + bins.xargs "-I" "{}" "-n1" + "if" [ eprintf "instantiating %s\n" "{}" ] + "nix-instantiate" "$1" "-A" "{}" + ]; + +in { + inherit + instantiate-nixpkgs-randomly + # requires hnix, which we don’t want in tvl for now + # uncomment manually if you want to use it. + # meta-stdenv-lib + # replace-stdenv-lib + ; +} diff --git a/users/Profpatsch/read-http.nix b/users/Profpatsch/read-http.nix new file mode 100644 index 000000000000..614993c457e1 --- /dev/null +++ b/users/Profpatsch/read-http.nix @@ -0,0 +1,16 @@ +{ depot, pkgs, ... 
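+# read-http (defined in read-http.rs below) parses an HTTP request or response
+# on stdin, selected via the arglib-netencode field `what` ("request" or "response"),
+# and prints a netencode record on stdout: for a request the fields `method`, `path`
+# and a `headers` record keyed by lowercased header names; for a response `status`
+# and `status-text` plus the same `headers` record.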
}: + +let + + read-http = depot.users.Profpatsch.writers.rustSimple { + name = "read-http"; + dependencies = [ + depot.users.Profpatsch.rust-crates.ascii + depot.users.Profpatsch.rust-crates.httparse + depot.users.Profpatsch.netencode.netencode-rs + depot.users.Profpatsch.arglib.netencode.rust + depot.users.Profpatsch.execline.exec-helpers + ]; + } (builtins.readFile ./read-http.rs); + +in read-http diff --git a/users/Profpatsch/read-http.rs b/users/Profpatsch/read-http.rs new file mode 100644 index 000000000000..50ff663b994a --- /dev/null +++ b/users/Profpatsch/read-http.rs @@ -0,0 +1,211 @@ +extern crate httparse; +extern crate netencode; +extern crate arglib_netencode; +extern crate ascii; +extern crate exec_helpers; + +use std::io::Read; +use std::io::Write; +use std::collections::HashMap; +use exec_helpers::{die_user_error, die_expected_error, die_temporary}; + +use netencode::{U, dec}; +use netencode::dec::Decoder; + +enum What { + Request, + Response +} + +// reads an http request or response from stdin, and writes all headers to stdout as a netencoded record. +// The keys are text, but can be lists of text iff headers appear multiple times, so beware. +fn main() -> std::io::Result<()> { + + exec_helpers::no_args("read-http"); + + let args = dec::RecordDot { + field: "what", + inner: dec::OneOf { + list: vec!["request", "response"], + inner: dec::Text + } + }; + let what: What = match args.dec(arglib_netencode::arglib_netencode("read-http", None).to_u()) { + Ok("request") => What::Request, + Ok("response") => What::Response, + Ok(v) => panic!("shouldn’t happen, value was: {}", v), + Err(dec::DecodeError(err)) => die_user_error("read-http", err), + }; + + fn read_stdin_to_complete<F>(mut parse: F) -> () + where F: FnMut(&[u8]) -> httparse::Result<usize> + { + let mut res = httparse::Status::Partial; + loop { + if let httparse::Status::Complete(_) = res { + return; + } + let mut buf = [0; 2048]; + match std::io::stdin().read(&mut buf[..]) { + Ok(size) => if size == 0 { + break; + }, + Err(err) => die_temporary("read-http", format!("could not read from stdin, {:?}", err)) + } + match parse(&buf) { + Ok(status) => { + res = status; + } + Err(err) => die_temporary("read-http", format!("httparse parsing failed: {:#?}", err)) + } + } + } + + fn normalize_headers<'a>(headers: &'a [httparse::Header]) -> HashMap<String, U<'a>> { + let mut res = HashMap::new(); + for httparse::Header { name, value } in headers { + let val = ascii::AsciiStr::from_ascii(*value) + .expect(&format!("read-http: we require header values to be ASCII, but the header {} was {:?}", name, value)) + .as_str(); + // lowercase the header names, since the standard doesn’t care + // and we want unique strings to match against + let name_lower = name.to_lowercase(); + match res.insert(name_lower, U::Text(val)) { + None => (), + Some(U::Text(t)) => { + let name_lower = name.to_lowercase(); + let _ = res.insert(name_lower, U::List(vec![U::Text(t), U::Text(val)])); + () + }, + Some(U::List(mut l)) => { + let name_lower = name.to_lowercase(); + l.push(U::Text(val)); + let _ = res.insert(name_lower, U::List(l)); + () + }, + Some(o) => panic!("read-http: header not text nor list: {:?}", o), + } + } + res + } + + // tries to read until the end of the http header (delineated by a double CRLF "\r\n\r\n") + fn read_till_end_of_header<R: Read>(buf: &mut Vec<u8>, reader: R) -> Option<()> { + let mut chonker = Chunkyboi::new(reader, 4096); + loop { + // TODO: attacker can send looooong input, set upper maximum + match 
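+            // (note: the "\r\n\r\n" scan below only looks within a single chunk,
+            // so a header terminator straddling a chunk boundary would be missed)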
chonker.next() { + Some(Ok(chunk)) => { + buf.extend_from_slice(&chunk); + if chunk.windows(4).any(|c| c == b"\r\n\r\n" ) { + return Some(()); + } + }, + Some(Err(err)) => die_temporary("read-http", format!("error reading from stdin: {:?}", err)), + None => return None + } + } + } + + // max header size chosen arbitrarily + let mut headers = [httparse::EMPTY_HEADER; 128]; + let stdin = std::io::stdin(); + + match what { + What::Request => { + let mut req = httparse::Request::new(&mut headers); + let mut buf: Vec<u8> = vec![]; + match read_till_end_of_header(&mut buf, stdin.lock()) { + Some(()) => match req.parse(&buf) { + Ok(httparse::Status::Complete(_body_start)) => {}, + Ok(httparse::Status::Partial) => die_expected_error("read-http", "httparse should have gotten a full header"), + Err(err) => die_expected_error("read-http", format!("httparse request parsing failed: {:#?}", err)) + }, + None => die_expected_error("read-http", format!("httparse end of stdin reached before able to parse request headers")) + } + let method = req.method.expect("method must be filled on complete parse"); + let path = req.path.expect("path must be filled on complete parse"); + write_dict_req(method, path, &normalize_headers(req.headers)) + }, + What::Response => { + let mut resp = httparse::Response::new(&mut headers); + let mut buf: Vec<u8> = vec![]; + match read_till_end_of_header(&mut buf, stdin.lock()) { + Some(()) => match resp.parse(&buf) { + Ok(httparse::Status::Complete(_body_start)) => {}, + Ok(httparse::Status::Partial) => die_expected_error("read-http", "httparse should have gotten a full header"), + Err(err) => die_expected_error("read-http", format!("httparse response parsing failed: {:#?}", err)) + }, + None => die_expected_error("read-http", format!("httparse end of stdin reached before able to parse response headers")) + } + let code = resp.code.expect("code must be filled on complete parse"); + let reason = resp.reason.expect("reason must be filled on complete parse"); + write_dict_resp(code, reason, &normalize_headers(resp.headers)) + } + } +} + +fn write_dict_req<'a, 'buf>(method: &'buf str, path: &'buf str, headers: &'a HashMap<String, U<'a>>) -> std::io::Result<()> { + let http = vec![ + ("method", U::Text(method)), + ("path", U::Text(path)), + ].into_iter().collect(); + write_dict(http, headers) +} + +fn write_dict_resp<'a, 'buf>(code: u16, reason: &'buf str, headers: &'a HashMap<String, U<'a>>) -> std::io::Result<()> { + let http = vec![ + ("status", U::N6(code as u64)), + ("status-text", U::Text(reason)), + ].into_iter().collect(); + write_dict(http, headers) +} + + +fn write_dict<'buf, 'a>(mut http: HashMap<&str, U<'a>>, headers: &'a HashMap<String, U<'a>>) -> std::io::Result<()> { + match http.insert("headers", U::Record( + headers.iter().map(|(k,v)| (k.as_str(), v.clone())).collect() + )) { + None => (), + Some(_) => panic!("read-http: headers already in dict"), + }; + netencode::encode( + &mut std::io::stdout(), + &U::Record(http) + )?; + Ok(()) +} + + +// iter helper + +struct Chunkyboi<T> { + inner: T, + buf: Vec<u8>, +} + +impl<R: Read> Chunkyboi<R> { + fn new(inner: R, chunksize: usize) -> Self { + let buf = vec![0; chunksize]; + Chunkyboi { + inner, + buf + } + } +} + +impl<R: Read> Iterator for Chunkyboi<R> { + type Item = std::io::Result<Vec<u8>>; + + fn next(&mut self) -> Option<std::io::Result<Vec<u8>>> { + match self.inner.read(&mut self.buf) { + Ok(0) => None, + Ok(read) => { + // clone a new buffer so we can reuse the internal one + Some(Ok(self.buf[..read].to_owned())) + 
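+                // (read() may fill less than the whole buffer; each short read
+                // is yielded as its own chunk)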
} + Err(err) => Some(Err(err)) + } + } +} diff --git a/users/Profpatsch/rust-crates.nix b/users/Profpatsch/rust-crates.nix new file mode 100644 index 000000000000..e6a191f70f4b --- /dev/null +++ b/users/Profpatsch/rust-crates.nix @@ -0,0 +1,122 @@ +{ depot, pkgs, ... }: +rec { + cfg-if = pkgs.buildRustCrate { + pname = "cfg-if"; + crateName = "cfg-if"; + version = "1.0.0"; + sha256 = "1fzidq152hnxhg4lj6r2gv4jpnn8yivp27z6q6xy7w6v0dp6bai9"; + }; + + cc = pkgs.buildRustCrate { + pname = "cc"; + crateName = "cc"; + version = "1.0.66"; + sha256 = "12q71z6ck8wlqrwgi25x3lrryyks9djymswn9b1c6qq0i01jpc1p"; + }; + + ascii = pkgs.buildRustCrate { + pname = "ascii"; + crateName = "ascii"; + version = "1.0.0"; + sha256 = "0gam8xsn981wfa40srsniivffjsfz1pg0xnigmczk9k7azb1ks1m"; + }; + + regex-syntax = pkgs.buildRustCrate { + pname = "regex-syntax"; + crateName = "regex-syntax"; + version = "0.6.22"; + sha256 = "0r00n2dgyixacl1sczqp18gxf0xh7x272hcdp62412lypba2gqyg"; + }; + + regex = pkgs.buildRustCrate { + pname = "regex"; + crateName = "regex"; + version = "1.4.3"; + features = [ "std" ]; + dependencies = [ regex-syntax ]; + sha256 = "0w0b4bh0ng20lf5y8raaxmxj46ikjqpgwy1iggzpby9lhv9vydkp"; + }; + + libloading = pkgs.buildRustCrate { + pname = "libloading"; + crateName = "libloading"; + version = "0.6.7"; + dependencies = [ cfg-if ]; + sha256 = "111d8zsizswnxiqn43vcgnc2ym9spsx1i6pcfp35ca3yw2ixq95j"; + }; + + tree-sitter = pkgs.buildRustCrate { + pname = "tree_sitter"; + crateName = "tree-sitter"; + # buildRustCrate isn’t really smart enough to detect the subdir + libPath = "binding_rust/lib.rs"; + # and the build.rs is also not where buildRustCrate would find it + build = "binding_rust/build.rs"; + version = "0.17.1"; + dependencies = [ regex ]; + buildDependencies = [ cc ]; + sha256 = "0jwwbvs4icpra7m1ycvnyri5h3sbw4qrfvgnnvnk72h4w93qhzhr"; + }; + + libc = pkgs.buildRustCrate { + pname = "libc"; + crateName = "libc"; + version = "0.2.82"; + sha256 = "02zgn6c0xwh331hky417lbr29kmvrw3ylxs8822syyhjfjqszvsx"; + }; + + bitflags = pkgs.buildRustCrate { + pname = "bitflags"; + crateName = "bitflags"; + version = "1.2.1"; + sha256 = "0b77awhpn7yaqjjibm69ginfn996azx5vkzfjj39g3wbsqs7mkxg"; + }; + + inotify-sys = pkgs.buildRustCrate { + pname = "inotify-sys"; + crateName = "inotify-sys"; + version = "0.1.5"; + dependencies = [ libc ]; + sha256 = "1yiy577xxhi0j90nbg9nkd8cqwc1xix62rz55jjngvxa5jl5613v"; + }; + + inotify = pkgs.buildRustCrate { + pname = "inotify"; + crateName = "inotify"; + version = "0.9.2"; + edition = "2018"; + dependencies = [ bitflags libc inotify-sys ]; + sha256 = "0fcknyvknglwwk1pdzdlb4m0ry2dym1yx8r5prf2v00pxnjk0hv2"; + }; + + httparse = pkgs.buildRustCrate { + pname = "httparse"; + version = "1.3.4"; + crateName = "httparse"; + sha256 = "0dggj4s0cq69bn63q9nqzzay5acmwl33nrbhjjsh5xys8sk2x4jw"; + }; + + version-check = pkgs.buildRustCrate { + pname = "version-check"; + version = "0.9.2"; + crateName = "version-check"; + sha256 = "1vwvc1mzwv8ana9jv8z933p2xzgj1533qwwl5zr8mi89azyhq21v"; + }; + + memchr = pkgs.buildRustCrate { + pname = "memchr"; + version = "2.3.3"; + crateName = "memchr"; + sha256 = "1ivxvlswglk6wd46gadkbbsknr94gwryk6y21v64ja7x4icrpihw"; + }; + nom = pkgs.buildRustCrate { + pname = "nom"; + version = "5.1.1"; + crateName = "nom"; + sha256 = "1gb4r6mjwd645jqh02nhn60i7qkw8cgy3xq1r4clnmvz3cmkv1l0"; + dependencies = [ memchr ]; + buildDependencies = [ version-check ]; + features = [ "std" "alloc" ]; + }; + +} diff --git a/users/Profpatsch/tree-sitter.nix b/users/Profpatsch/tree-sitter.nix 
new file mode 100644 index 000000000000..099fa2d5b7b9 --- /dev/null +++ b/users/Profpatsch/tree-sitter.nix @@ -0,0 +1,179 @@ +{ depot, pkgs, lib, ... }: + +let + bins = depot.nix.getBins pkgs.coreutils [ "head" "printf" "cat" ] + // depot.nix.getBins pkgs.ncurses [ "tput" ] + // depot.nix.getBins pkgs.bc [ "bc" ] + // depot.nix.getBins pkgs.ocamlPackages.sexp [ "sexp" ]; + + print-ast = depot.users.Profpatsch.writers.rustSimple { + name = "print-ast"; + dependencies = with depot.users.Profpatsch.rust-crates; [ + libloading + tree-sitter + ]; + } '' + extern crate libloading; + extern crate tree_sitter; + use std::mem; + use libloading::{Library, Symbol}; + use tree_sitter::{Language, Parser}; + + /// Load the shared lib FILE and return the language under SYMBOL-NAME. + /// Inspired by the rust source of emacs-tree-sitter. + fn _load_language(file: String, symbol_name: String) -> Result<Language, libloading::Error> { + let lib = Library::new(file)?; + let tree_sitter_lang: Symbol<'_, unsafe extern "C" fn() -> _> = + unsafe { lib.get(symbol_name.as_bytes())? }; + let language: Language = unsafe { tree_sitter_lang() }; + // Avoid segmentation fault by not unloading the lib, as language is a static piece of data. + // TODO: Attach an Rc<Library> to Language instead. + mem::forget(lib); + Ok(language) + } + + fn main() { + let mut args = std::env::args(); + // (nth consumes the iterator: nth(1) skips argv[0]; each later nth(0) takes the next argument) + let so = args.nth(1).unwrap(); + let symbol_name = args.nth(0).unwrap(); + let file = args.nth(0).unwrap(); + let mut parser = Parser::new(); + let lang = _load_language(so, symbol_name).unwrap(); + parser.set_language(lang).unwrap(); + let bytes = std::fs::read(&file).unwrap(); + print!("{}", parser.parse(&bytes, None).unwrap().root_node().to_sexp()); + } + + + ''; + + tree-sitter-nix = buildTreeSitterGrammar { + language = "tree-sitter-nix"; + source = pkgs.fetchFromGitHub { + owner = "cstrahan"; + repo = "tree-sitter-nix"; + rev = "791b5ff0e4f0da358cbb941788b78d436a2ca621"; + sha256 = "1y5b3wh3fcmbgq8r2i97likzfp1zp02m58zacw5a1cjqs5raqz66"; + }; + }; + + watch-file-modified = depot.users.Profpatsch.writers.rustSimple { + name = "watch-file-modified"; + dependencies = [ + depot.users.Profpatsch.rust-crates.inotify + depot.users.Profpatsch.netstring.rust-netstring + ]; + } '' + extern crate inotify; + extern crate netstring; + use inotify::{WatchMask, Inotify}; + use std::io::Write; + + fn main() { + let mut inotify = Inotify::init() + .expect("Failed to initialize inotify"); + + let file = std::env::args().nth(1).unwrap(); + + let file_watch = inotify + .add_watch( + &file, + WatchMask::MODIFY + ) + .expect("Failed to add inotify watch"); + + let mut buffer = [0u8; 4096]; + loop { + let events = inotify + .read_events_blocking(&mut buffer) + .expect("Failed to read inotify events"); + + for event in events { + if event.wd == file_watch { + std::io::stdout().write_all(&netstring::to_netstring(file.as_bytes())) + .expect("could not write to stdout"); + std::io::stdout().flush() + .expect("could not flush stdout"); + } + } + } + } + + ''; + + # clear screen and set LINES and COLUMNS to terminal height & width + clear-screen = depot.nix.writeExecline "clear-screen" {} [ + "if" [ bins.tput "clear" ] + "backtick" "-in" "LINES" [ bins.tput "lines" ] + "backtick" "-in" "COLUMNS" [ bins.tput "cols" ] + "$@" + ]; + + print-nix-file = depot.nix.writeExecline "print-nix-file" { readNArgs = 1; } [ + "pipeline" [ print-ast "${tree-sitter-nix}/parser" "tree_sitter_nix" "$1" ] + "pipeline" [ bins.sexp "print" ] + clear-screen + "importas" "-ui" "lines" "LINES" + "backtick" "-in" "ls" [ + "pipeline" + 
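+      # the pipeline below feeds "x=$LINES; --x" to bc, which prints LINES - 1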
# when you pull out bc to decrement an integer it’s time to switch to python lol + [ bins.printf "x=%s; --x\n" "$lines" ] + bins.bc + ] + "importas" "-ui" "l" "ls" + bins.head "-n\${l}" + ]; + + print-nix-file-on-update = depot.nix.writeExecline "print-nix-file-on-update" { readNArgs = 1; } [ + "if" [ print-nix-file "$1" ] + "pipeline" [ watch-file-modified "$1" ] + "forstdin" "-d" "" "file" + "importas" "file" "file" + print-nix-file "$file" + ]; + + # copied from nixpkgs + buildTreeSitterGrammar = + { + # language name + language + # source for the language grammar + , source + }: + + pkgs.stdenv.mkDerivation { + + pname = "${language}-grammar"; + inherit (pkgs.tree-sitter) version; + + src = source; + + buildInputs = [ pkgs.tree-sitter ]; + + dontUnpack = true; + configurePhase = ":"; + buildPhase = '' + runHook preBuild + scanner_cc="$src/src/scanner.cc" + if [ ! -f "$scanner_cc" ]; then + scanner_cc="" + fi + $CXX -I$src/src/ -c $scanner_cc + $CC -I$src/src/ -shared -o parser -Os scanner.o $src/src/parser.c -lstdc++ + runHook postBuild + ''; + installPhase = '' + runHook preInstall + mkdir $out + mv parser $out/ + runHook postInstall + ''; + }; + +in { + inherit + print-ast + tree-sitter-nix + print-nix-file-on-update + watch-file-modified + ; +} diff --git a/users/Profpatsch/writers/default.nix b/users/Profpatsch/writers/default.nix new file mode 100644 index 000000000000..3888579a64d8 --- /dev/null +++ b/users/Profpatsch/writers/default.nix @@ -0,0 +1,164 @@ +{ depot, pkgs, lib, ... }: +let + bins = depot.nix.getBins pkgs.coreutils ["printf" "mkdir" "cat" "ln" "ls" "touch" ]; + + inherit (depot.nix.yants) defun struct restrict attrs list string drv any; + + inherit (depot.nix) drvSeqL; + + FlakeError = + restrict + "flake error" + (s: lib.any (prefix: (builtins.substring 0 1 s) == prefix) + [ "E" "W" ]) + string; + Libraries = defun [ (attrs any) (list drv) ]; + + python3 = { + name, + libraries ? (_: []), + flakeIgnore ? [] + }: pkgs.writers.writePython3 name { + libraries = Libraries libraries pkgs.python3Packages; + flakeIgnore = + let ignoreTheseErrors = [ + # whitespace after { + "E201" + # whitespace before } + "E202" + # fuck 4-space indentation + "E121" "E111" + # who cares about blank lines … + # … at end of files + "W391" + # … between functions + "E302" "E305" + ]; + in list FlakeError (ignoreTheseErrors ++ flakeIgnore); + }; + + # TODO: add the same flake check as the python3 writer + python3Lib = { name, libraries ? (_: []) }: moduleString: + let srcTree = depot.nix.runExecline.local name { stdin = moduleString; } [ + "importas" "out" "out" + "if" [ bins.mkdir "-p" "\${out}/${name}" ] + "if" [ + "redirfd" "-w" "1" "\${out}/setup.py" + bins.printf '' + from distutils.core import setup + + setup( + name='%s', + packages=['%s'] + ) + '' name name + ] + "if" [ + # redirect stdin to the init py + "redirfd" "-w" "1" "\${out}/${name}/__init__.py" + bins.cat + ] + ]; + in pkgs.python3Packages.buildPythonPackage { + inherit name; + src = srcTree; + propagatedBuildInputs = libraries pkgs.python3Packages; + doCheck = false; + }; + + rustSimple = args@{name, ...}: src: + linkTo name "${rustSimpleBin args src}/bin/${name}"; + + linkTo = name: path: depot.nix.runExecline.local name {} [ + "importas" "out" "out" + bins.ln "-sT" path "$out" + ]; + + rustSimpleBin = { + name, + dependencies ? [], + ... 
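+    # (any extra attrs given here fall through to buildRustCrate via the
+    # `// args` merge below, overriding defaults such as the placeholder version)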
}@args: src: pkgs.buildRustCrate ({ + pname = name; + version = "1.0.0"; + crateName = name; + crateBin = [ name ]; + dependencies = dependencies; + src = pkgs.runCommandLocal "write-main.rs" { + src = src; + passAsFile = [ "src" ]; + } '' + mkdir -p $out/src/bin + cp "$srcPath" $out/src/bin/${name}.rs + find $out + ''; + } // args); + + rustSimpleLib = { + name, + dependencies ? [], + ... + }@args: src: pkgs.buildRustCrate ({ + pname = name; + version = "1.0.0"; + crateName = name; + dependencies = dependencies; + src = pkgs.runCommandLocal "write-lib.rs" { + src = src; + passAsFile = [ "src" ]; + } '' + mkdir -p $out/src + cp "$srcPath" $out/src/lib.rs + find $out + ''; + } // args); + + /* Takes a `buildRustCrate` derivation as an input, + * builds it with `{ buildTests = true; }` and runs + * all tests found in its `tests` dir. If they are + * all successful, `$out` will point to the crate + * built with `{ buildTests = false; }`, otherwise + * it will fail to build. + * + * See also `nix.drvSeqL` which is used to implement + * this behavior. + */ + testRustSimple = rustDrv: + let + crate = buildTests: rustDrv.override { inherit buildTests; }; + tests = depot.nix.runExecline.local "${rustDrv.name}-tests-run" {} [ + "importas" "out" "out" + "if" [ + "pipeline" [ bins.ls "${crate true}/tests" ] + "forstdin" "test" + "importas" "test" "test" + "${crate true}/tests/$test" + ] + bins.touch "$out" + ]; + in drvSeqL [ tests ] (crate false); + + + tests = import ./tests.nix { + inherit + depot + pkgs + python3 + python3Lib + rustSimpleLib + rustSimple + testRustSimple + ; + }; + +in { + inherit + python3 + python3Lib + rustSimple + rustSimpleBin + rustSimpleLib + testRustSimple + tests + ; +} diff --git a/users/Profpatsch/writers/tests.nix b/users/Profpatsch/writers/tests.nix new file mode 100644 index 000000000000..680c37a2ec02 --- /dev/null +++ b/users/Profpatsch/writers/tests.nix @@ -0,0 +1,85 @@ +{ depot, pkgs, python3, python3Lib, rustSimpleLib, rustSimple, testRustSimple }: + +let + run = drv: depot.nix.runExecline.local "run-${drv.name}" {} [ + "if" [ drv ] + "importas" "out" "out" + "${pkgs.coreutils}/bin/touch" "$out" + ]; + + pythonTransitiveLib = python3Lib { + name = "transitive"; + } '' + def transitive(s): + return s + " 1 2 3" + ''; + + pythonTestLib = python3Lib { + name = "test_lib"; + libraries = _: [ pythonTransitiveLib ]; + } '' + import transitive + def test(): + return transitive.transitive("test") + ''; + + pythonWithLib = run (python3 { + name = "python-with-lib"; + libraries = _: [ pythonTestLib ]; + } '' + import test_lib + + assert(test_lib.test() == "test 1 2 3") + ''); + + + rustTransitiveLib = testRustSimple (rustSimpleLib { + name = "transitive"; + } '' + pub fn transitive(s: &str) -> String { + let mut new = s.to_string(); + new.push_str(" 1 2 3"); + new + } + + #[cfg(test)] + mod tests { + use super::*; + + #[test] + fn test_transitive() { + assert_eq!(transitive("foo").as_str(), "foo 1 2 3") + } + } + ''); + + rustTestLib = rustSimpleLib { + name = "test_lib"; + dependencies = [ rustTransitiveLib ]; + } '' + extern crate transitive; + use transitive::{transitive}; + pub fn test() -> String { + transitive("test") + } + ''; + + rustWithLib = run (rustSimple { + name = "rust-with-lib"; + dependencies = [ rustTestLib ]; + } '' + extern crate test_lib; + + fn main() { + assert_eq!(test_lib::test(), String::from("test 1 2 3")); + } + ''); + + +in { + inherit + pythonWithLib + rustTransitiveLib + rustWithLib + ; +}
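
For orientation, here is a minimal sketch of how a consumer of these writers could look. The file path and attribute names are hypothetical; the calling convention is the same one the tests above use:

```
# hypothetical file, e.g. users/Profpatsch/hello/default.nix
{ depot, ... }:

# rustSimple writes the source to src/bin/hello.rs, builds it with
# buildRustCrate and links the resulting binary to $out
depot.users.Profpatsch.writers.rustSimple {
  name = "hello";
} ''
  fn main() {
    println!("hello");
  }
''
```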